ngram
listlengths
0
82k
[ "noqa: F401 from .context_manager_hook import ContextManagerHook from .counter_hook import CounterHook", "import StackTraceHook from .timer_hook import TimerHook from .tracing_hook import TracingHook", ".runtime_audit_hook import RuntimeAuditHook from .stack_trace_hook import StackTraceHook from .timer_hook import", "F401 from .context_manager_hook import ContextManagerHook from .counter_hook import CounterHook from", "import TracingHook __all__ = [ \"CounterHook\", \"FileOpenHook\", \"LoggingHook\", \"ProfilerHook\", \"StackTraceHook\",", "from .context_manager_hook import ContextManagerHook from .counter_hook import CounterHook from .file_open_hook", "TracingHook __all__ = [ \"CounterHook\", \"FileOpenHook\", \"LoggingHook\", \"ProfilerHook\", \"StackTraceHook\", \"RuntimeAuditHook\",", "import RuntimeAuditHook from .stack_trace_hook import StackTraceHook from .timer_hook import TimerHook", "flake8: noqa: F401 from .context_manager_hook import ContextManagerHook from .counter_hook import", ".counter_hook import CounterHook from .file_open_hook import FileOpenHook from .logging_hook import", "ContextManagerHook from .counter_hook import CounterHook from .file_open_hook import FileOpenHook from", "from .logging_hook import LoggingHook from .profiler_hook import ProfilerHook from .runtime_audit_hook", "= [ \"CounterHook\", \"FileOpenHook\", \"LoggingHook\", \"ProfilerHook\", \"StackTraceHook\", \"RuntimeAuditHook\", \"TimerHook\", \"TracingHook\",", "# flake8: noqa: F401 from .context_manager_hook import ContextManagerHook from .counter_hook", "import LoggingHook from .profiler_hook import ProfilerHook from .runtime_audit_hook import RuntimeAuditHook", "__all__ = [ \"CounterHook\", \"FileOpenHook\", \"LoggingHook\", \"ProfilerHook\", \"StackTraceHook\", \"RuntimeAuditHook\", \"TimerHook\",", "import CounterHook from .file_open_hook import FileOpenHook from .logging_hook import LoggingHook", ".timer_hook import TimerHook from .tracing_hook import TracingHook 
__all__ = [", "[ \"CounterHook\", \"FileOpenHook\", \"LoggingHook\", \"ProfilerHook\", \"StackTraceHook\", \"RuntimeAuditHook\", \"TimerHook\", \"TracingHook\", ]", "from .stack_trace_hook import StackTraceHook from .timer_hook import TimerHook from .tracing_hook", ".tracing_hook import TracingHook __all__ = [ \"CounterHook\", \"FileOpenHook\", \"LoggingHook\", \"ProfilerHook\",", "RuntimeAuditHook from .stack_trace_hook import StackTraceHook from .timer_hook import TimerHook from", ".file_open_hook import FileOpenHook from .logging_hook import LoggingHook from .profiler_hook import", "import FileOpenHook from .logging_hook import LoggingHook from .profiler_hook import ProfilerHook", "from .counter_hook import CounterHook from .file_open_hook import FileOpenHook from .logging_hook", "import ProfilerHook from .runtime_audit_hook import RuntimeAuditHook from .stack_trace_hook import StackTraceHook", ".stack_trace_hook import StackTraceHook from .timer_hook import TimerHook from .tracing_hook import", "from .profiler_hook import ProfilerHook from .runtime_audit_hook import RuntimeAuditHook from .stack_trace_hook", "CounterHook from .file_open_hook import FileOpenHook from .logging_hook import LoggingHook from", "LoggingHook from .profiler_hook import ProfilerHook from .runtime_audit_hook import RuntimeAuditHook from", ".logging_hook import LoggingHook from .profiler_hook import ProfilerHook from .runtime_audit_hook import", "ProfilerHook from .runtime_audit_hook import RuntimeAuditHook from .stack_trace_hook import StackTraceHook from", "from .file_open_hook import FileOpenHook from .logging_hook import LoggingHook from .profiler_hook", "FileOpenHook from .logging_hook import LoggingHook from .profiler_hook import ProfilerHook from", "import ContextManagerHook from .counter_hook import CounterHook from .file_open_hook import FileOpenHook", "StackTraceHook from .timer_hook import TimerHook from .tracing_hook import TracingHook __all__", 
"<filename>seagrass/hooks/__init__.py # flake8: noqa: F401 from .context_manager_hook import ContextManagerHook from", "import TimerHook from .tracing_hook import TracingHook __all__ = [ \"CounterHook\",", ".profiler_hook import ProfilerHook from .runtime_audit_hook import RuntimeAuditHook from .stack_trace_hook import", "TimerHook from .tracing_hook import TracingHook __all__ = [ \"CounterHook\", \"FileOpenHook\",", ".context_manager_hook import ContextManagerHook from .counter_hook import CounterHook from .file_open_hook import", "from .runtime_audit_hook import RuntimeAuditHook from .stack_trace_hook import StackTraceHook from .timer_hook", "from .tracing_hook import TracingHook __all__ = [ \"CounterHook\", \"FileOpenHook\", \"LoggingHook\",", "from .timer_hook import TimerHook from .tracing_hook import TracingHook __all__ =" ]
[ "[1., 1.09, 2.865, 3.93, 0.928], [1., 1.25, 3.844, 3.94, 1.009],", "1.22, 3.366, 4.41, 1.137], [5., 1.05, 2.416, 4.64, 1.455], [5.,", "3.037, 3.62, 0.75], [2., 1.05, 2.074, 4.09, 1.036], [2., 1.17,", "1.09, 2.865, 3.93, 0.928], [1., 1.25, 3.844, 3.94, 1.009], [1.,", "dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) assert isinstance(des_mat, np.ndarray) assert", "assert isinstance(des_mat, np.ndarray) assert des_mat.shape == dat.shape assert isinstance(des_mat_df, np.ndarray)", "0.606], [6., 1.05, 2.199, 3.75, 0.79], [6., 1.02, 2.132, 3.99,", "3.1, 4.57, 1.325], [6., 1.11, 2.813, 3.76, 0.8], [6., 0.75,", "1.07, 2.251, 3.21, 0.562], [6., 1.13, 3.064, 3.63, 0.707], [6.,", "1.14, 3.001, 4.05, 1.023], [4., 1.01, 2.439, 3.92, 1.067], [4.,", "3.76, 0.8], [6., 0.75, 0.84, 3.14, 0.606], [6., 1.05, 2.199,", "1.017], [5., 0.91, 1.532, 4.04, 1.084], [5., 1.15, 2.552, 4.16,", "3.92, 1.067], [4., 0.99, 2.199, 3.27, 0.693], [4., 1.11, 3.318,", "3.85, 1.017], [5., 0.91, 1.532, 4.04, 1.084], [5., 1.15, 2.552,", "[6., 1.13, 3.064, 3.63, 0.707], [6., 1.11, 2.469, 3.95, 0.952]])", "1.476], [3., 1.2, 3.085, 4.78, 1.571], [3., 1.2, 3.308, 4.57,", "3.51, 0.726], [1., 1.11, 3.211, 3.98, 1.209], [1., 1.16, 3.037,", "np.ndarray) assert des_mat_group_df.shape == dat.shape def test_build_matrix(): arr1 = [4,", "1], dat[:, 2], dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) des_mat_group_df", "assert des_mat_no_group.shape[1] == 2 assert isinstance(des_mat, np.ndarray) assert des_mat_group_df.shape ==", "[2., 1.17, 2.782, 4.38, 1.197], [2., 1.15, 3.018, 4.65, 1.244],", "3.58, 0.76], [1., 1.19, 2.928, 3.75, 0.821], [1., 1.09, 2.865,", "0])) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:,", "[6., 0.75, 0.84, 3.14, 0.606], [6., 1.05, 2.199, 3.75, 0.79],", "[2., 1.11, 3.378, 4.87, 1.635], [2., 1.25, 3.906, 4.98, 1.517],", "dat[:, 4], group=dat[:, 0]) des_mat_df = _build_des_mat(dat_df[1], dat_df[2], dat_df[3], dat_df[4],", "4.05, 1.023], [4., 1.01, 2.439, 
3.92, 1.067], [4., 0.99, 2.199,", "isinstance(des_mat_df, np.ndarray) assert des_mat_df.shape == dat.shape assert isinstance(des_mat_no_group, np.ndarray) assert", "0.562], [6., 1.13, 3.064, 3.63, 0.707], [6., 1.11, 2.469, 3.95,", "= test_array() dat_df = pd.DataFrame(dat) des_mat = _build_des_mat(dat[:, 1], dat[:,", "2 assert isinstance(des_mat, np.ndarray) assert des_mat_group_df.shape == dat.shape def test_build_matrix():", "4.42, 1.242], [5., 0.99, 2.079, 3.47, 0.673], [5., 1.22, 3.366,", "3.94, 1.009], [1., 1.11, 3.027, 3.6, 0.766], [1., 1.08, 2.336,", "[5., 0.91, 1.532, 4.04, 1.084], [5., 1.15, 2.552, 4.16, 1.151],", "[3., 1.17, 3.231, 4.56, 1.458], [4., 1.22, 2.838, 3.89, 0.944],", "1.15, 3.018, 4.65, 1.244], [2., 1.17, 3.383, 4.69, 1.495], [2.,", "isinstance(des_mat, np.ndarray) assert des_mat.shape == dat.shape assert isinstance(des_mat_df, np.ndarray) assert", "[5., 1.22, 3.366, 4.41, 1.137], [5., 1.05, 2.416, 4.64, 1.455],", "des_mat.shape == dat.shape assert isinstance(des_mat_df, np.ndarray) assert des_mat_df.shape == dat.shape", "1.17, 3.231, 4.56, 1.458], [4., 1.22, 2.838, 3.89, 0.944], [4.,", "4.69, 1.495], [2., 1.19, 3.447, 4.4, 1.026], [3., 1.07, 2.505,", "0.944], [4., 1.03, 2.351, 4.05, 1.241], [4., 1.14, 3.001, 4.05,", "3.63, 0.707], [6., 1.11, 2.469, 3.95, 0.952]]) return d def", "0.766], [1., 1.08, 2.336, 3.51, 0.726], [1., 1.11, 3.211, 3.98,", "1.05, 2.199, 3.75, 0.79], [6., 1.02, 2.132, 3.99, 0.853], [6.,", "3.211, 3.98, 1.209], [1., 1.16, 3.037, 3.62, 0.75], [2., 1.05,", "4.16, 1.151], [5., 1.14, 3.083, 4.79, 1.381], [5., 1.05, 2.33,", "= [4, 4, 5, 5, 3, 2, 5] arr2 =", "1.949, 3.34, 0.61], [6., 1.07, 2.251, 3.21, 0.562], [6., 1.13,", "2.505, 3.76, 0.912], [3., 0.99, 2.315, 4.44, 1.398], [3., 1.06,", "[6., 1.02, 2.132, 3.99, 0.853], [6., 1.05, 1.949, 3.34, 0.61],", "3.447, 4.4, 1.026], [3., 1.07, 2.505, 3.76, 0.912], [3., 0.99,", "des_mat_no_group = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:, 4])", "1.026], [3., 1.07, 2.505, 3.76, 
0.912], [3., 0.99, 2.315, 4.44,", "2.439, 3.92, 1.067], [4., 0.99, 2.199, 3.27, 0.693], [4., 1.11,", "4], group=pd.DataFrame(dat[:, 0])) assert isinstance(des_mat, np.ndarray) assert des_mat.shape == dat.shape", "2.39, 4.67, 1.613], [3., 1.15, 3.021, 4.48, 1.476], [3., 1.2,", "3.93, 0.928], [1., 1.25, 3.844, 3.94, 1.009], [1., 1.11, 3.027,", "= pd.DataFrame(dat) des_mat = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3],", "3.018, 4.65, 1.244], [2., 1.17, 3.383, 4.69, 1.495], [2., 1.19,", "4], group=dat[:, 0]) des_mat_df = _build_des_mat(dat_df[1], dat_df[2], dat_df[3], dat_df[4], group=dat_df[0])", "[2., 1.25, 3.906, 4.98, 1.517], [2., 1.17, 2.782, 4.38, 1.197],", "[3., 1.07, 2.505, 3.76, 0.912], [3., 0.99, 2.315, 4.44, 1.398],", "test_array() dat_df = pd.DataFrame(dat) des_mat = _build_des_mat(dat[:, 1], dat[:, 2],", "1.506], [3., 1.17, 3.231, 4.56, 1.458], [4., 1.22, 2.838, 3.89,", "1.009], [1., 1.11, 3.027, 3.6, 0.766], [1., 1.08, 2.336, 3.51,", "[3., 1.06, 2.667, 4.38, 1.197], [3., 1.02, 2.39, 4.67, 1.613],", "0.91, 1.532, 4.04, 1.084], [5., 1.15, 2.552, 4.16, 1.151], [5.,", "dat.shape def test_build_matrix(): arr1 = [4, 4, 5, 5, 3,", "test_build_matrix(): arr1 = [4, 4, 5, 5, 3, 2, 5]", "1.197], [2., 1.15, 3.018, 4.65, 1.244], [2., 1.17, 3.383, 4.69,", "[4., 1.2, 3.601, 4.27, 1.242], [4., 1.08, 3.291, 3.85, 1.017],", "group=dat_df[0]) des_mat_no_group = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:,", "4.09, 1.036], [2., 1.17, 2.885, 4.06, 1.094], [2., 1.11, 3.378,", "0.853], [6., 1.05, 1.949, 3.34, 0.61], [6., 1.07, 2.251, 3.21,", "def test_array(): d = np.array([[1., 1.11, 2.569, 3.58, 0.76], [1.,", "3.085, 4.78, 1.571], [3., 1.2, 3.308, 4.57, 1.506], [3., 1.17,", "pd from hypothetical._lib import _build_des_mat def test_array(): d = np.array([[1.,", "1.241], [4., 1.14, 3.001, 4.05, 1.023], [4., 1.01, 2.439, 3.92,", "2.199, 3.27, 0.693], [4., 1.11, 3.318, 3.95, 1.085], [4., 1.2,", "3], dat[:, 4]) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:, 2], 
dat[:,", "3.95, 1.085], [4., 1.2, 3.601, 4.27, 1.242], [4., 1.08, 3.291,", "4.78, 1.571], [3., 1.2, 3.308, 4.57, 1.506], [3., 1.17, 3.231,", "1.398], [3., 1.06, 2.667, 4.38, 1.197], [3., 1.02, 2.39, 4.67,", "des_mat_df.shape == dat.shape assert isinstance(des_mat_no_group, np.ndarray) assert des_mat_no_group.shape[1] == 2", "0.75], [2., 1.05, 2.074, 4.09, 1.036], [2., 1.17, 2.885, 4.06,", "2.351, 4.05, 1.241], [4., 1.14, 3.001, 4.05, 1.023], [4., 1.01,", "def test_build_matrix(): arr1 = [4, 4, 5, 5, 3, 2,", "1.25, 3.906, 4.98, 1.517], [2., 1.17, 2.782, 4.38, 1.197], [2.,", "import numpy as np import pandas as pd from hypothetical._lib", "[5., 1.05, 2.416, 4.64, 1.455], [5., 1.13, 3.1, 4.57, 1.325],", "from hypothetical._lib import _build_des_mat def test_array(): d = np.array([[1., 1.11,", "3.76, 0.912], [3., 0.99, 2.315, 4.44, 1.398], [3., 1.06, 2.667,", "0.84, 3.14, 0.606], [6., 1.05, 2.199, 3.75, 0.79], [6., 1.02,", "4, 5, 5, 3, 2, 5] arr2 = [2, 3,", "[5., 1.13, 3.1, 4.57, 1.325], [6., 1.11, 2.813, 3.76, 0.8],", "assert des_mat_df.shape == dat.shape assert isinstance(des_mat_no_group, np.ndarray) assert des_mat_no_group.shape[1] ==", "2.838, 3.89, 0.944], [4., 1.03, 2.351, 4.05, 1.241], [4., 1.14,", "3.021, 4.48, 1.476], [3., 1.2, 3.085, 4.78, 1.571], [3., 1.2,", "dat_df[2], dat_df[3], dat_df[4], group=dat_df[0]) des_mat_no_group = _build_des_mat(dat[:, 1], dat[:, 2],", "[2., 1.17, 3.383, 4.69, 1.495], [2., 1.19, 3.447, 4.4, 1.026],", "4], group=pd.DataFrame(dat[:, 0])) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:, 2], dat[:,", "1.067], [4., 0.99, 2.199, 3.27, 0.693], [4., 1.11, 3.318, 3.95,", "group=dat[:, 0]) des_mat_df = _build_des_mat(dat_df[1], dat_df[2], dat_df[3], dat_df[4], group=dat_df[0]) des_mat_no_group", "[3., 1.15, 3.021, 4.48, 1.476], [3., 1.2, 3.085, 4.78, 1.571],", "as np import pandas as pd from hypothetical._lib import _build_des_mat", "1.11, 3.378, 4.87, 1.635], [2., 1.25, 3.906, 4.98, 1.517], [2.,", "assert isinstance(des_mat_no_group, 
np.ndarray) assert des_mat_no_group.shape[1] == 2 assert isinstance(des_mat, np.ndarray)", "dat[:, 2], dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) des_mat_group_df =", "[5., 1.14, 3.083, 4.79, 1.381], [5., 1.05, 2.33, 4.42, 1.242],", "np import pandas as pd from hypothetical._lib import _build_des_mat def", "= _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:, 4]) des_mat_group_df", "2.33, 4.42, 1.242], [5., 0.99, 2.079, 3.47, 0.673], [5., 1.22,", "1.05, 1.949, 3.34, 0.61], [6., 1.07, 2.251, 3.21, 0.562], [6.,", "np.ndarray) assert des_mat.shape == dat.shape assert isinstance(des_mat_df, np.ndarray) assert des_mat_df.shape", "1.07, 2.505, 3.76, 0.912], [3., 0.99, 2.315, 4.44, 1.398], [3.,", "3.383, 4.69, 1.495], [2., 1.19, 3.447, 4.4, 1.026], [3., 1.07,", "1.495], [2., 1.19, 3.447, 4.4, 1.026], [3., 1.07, 2.505, 3.76,", "numpy as np import pandas as pd from hypothetical._lib import", "[2., 1.15, 3.018, 4.65, 1.244], [2., 1.17, 3.383, 4.69, 1.495],", "[1., 1.19, 2.928, 3.75, 0.821], [1., 1.09, 2.865, 3.93, 0.928],", "== 2 assert isinstance(des_mat, np.ndarray) assert des_mat_group_df.shape == dat.shape def", "= np.array([[1., 1.11, 2.569, 3.58, 0.76], [1., 1.19, 2.928, 3.75,", "3.14, 0.606], [6., 1.05, 2.199, 3.75, 0.79], [6., 1.02, 2.132,", "[1., 1.11, 3.027, 3.6, 0.766], [1., 1.08, 2.336, 3.51, 0.726],", "2.251, 3.21, 0.562], [6., 1.13, 3.064, 3.63, 0.707], [6., 1.11,", "3.308, 4.57, 1.506], [3., 1.17, 3.231, 4.56, 1.458], [4., 1.22,", "4.67, 1.613], [3., 1.15, 3.021, 4.48, 1.476], [3., 1.2, 3.085,", "4.38, 1.197], [3., 1.02, 2.39, 4.67, 1.613], [3., 1.15, 3.021,", "1.17, 2.782, 4.38, 1.197], [2., 1.15, 3.018, 4.65, 1.244], [2.,", "[1., 1.25, 3.844, 3.94, 1.009], [1., 1.11, 3.027, 3.6, 0.766],", "1.094], [2., 1.11, 3.378, 4.87, 1.635], [2., 1.25, 3.906, 4.98,", "np.ndarray) assert des_mat_df.shape == dat.shape assert isinstance(des_mat_no_group, np.ndarray) assert des_mat_no_group.shape[1]", "[4., 1.03, 2.351, 4.05, 1.241], [4., 1.14, 3.001, 4.05, 
1.023],", "1.635], [2., 1.25, 3.906, 4.98, 1.517], [2., 1.17, 2.782, 4.38,", "3.27, 0.693], [4., 1.11, 3.318, 3.95, 1.085], [4., 1.2, 3.601,", "4.27, 1.242], [4., 1.08, 3.291, 3.85, 1.017], [5., 0.91, 1.532,", "[4., 1.11, 3.318, 3.95, 1.085], [4., 1.2, 3.601, 4.27, 1.242],", "1.455], [5., 1.13, 3.1, 4.57, 1.325], [6., 1.11, 2.813, 3.76,", "pandas as pd from hypothetical._lib import _build_des_mat def test_array(): d", "1.11, 2.469, 3.95, 0.952]]) return d def test_build_design_matrix(): dat =", "1.02, 2.39, 4.67, 1.613], [3., 1.15, 3.021, 4.48, 1.476], [3.,", "[6., 1.05, 2.199, 3.75, 0.79], [6., 1.02, 2.132, 3.99, 0.853],", "0.673], [5., 1.22, 3.366, 4.41, 1.137], [5., 1.05, 2.416, 4.64,", "dat_df[3], dat_df[4], group=dat_df[0]) des_mat_no_group = _build_des_mat(dat[:, 1], dat[:, 2], dat[:,", "1.22, 2.838, 3.89, 0.944], [4., 1.03, 2.351, 4.05, 1.241], [4.,", "3.291, 3.85, 1.017], [5., 0.91, 1.532, 4.04, 1.084], [5., 1.15,", "2.667, 4.38, 1.197], [3., 1.02, 2.39, 4.67, 1.613], [3., 1.15,", "[4, 4, 5, 5, 3, 2, 5] arr2 = [2,", "pd.DataFrame(dat) des_mat = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:,", "1.19, 2.928, 3.75, 0.821], [1., 1.09, 2.865, 3.93, 0.928], [1.,", "4.05, 1.241], [4., 1.14, 3.001, 4.05, 1.023], [4., 1.01, 2.439,", "[6., 1.11, 2.813, 3.76, 0.8], [6., 0.75, 0.84, 3.14, 0.606],", "0.726], [1., 1.11, 3.211, 3.98, 1.209], [1., 1.16, 3.037, 3.62,", "assert des_mat_group_df.shape == dat.shape def test_build_matrix(): arr1 = [4, 4,", "4.48, 1.476], [3., 1.2, 3.085, 4.78, 1.571], [3., 1.2, 3.308,", "[1., 1.16, 3.037, 3.62, 0.75], [2., 1.05, 2.074, 4.09, 1.036],", "2], dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) assert isinstance(des_mat, np.ndarray)", "2.199, 3.75, 0.79], [6., 1.02, 2.132, 3.99, 0.853], [6., 1.05,", "= _build_des_mat(dat_df[1], dat_df[2], dat_df[3], dat_df[4], group=dat_df[0]) des_mat_no_group = _build_des_mat(dat[:, 1],", "dat[:, 4], group=pd.DataFrame(dat[:, 0])) assert isinstance(des_mat, np.ndarray) assert 
des_mat.shape ==", "2], dat[:, 3], dat[:, 4], group=dat[:, 0]) des_mat_df = _build_des_mat(dat_df[1],", "des_mat_group_df.shape == dat.shape def test_build_matrix(): arr1 = [4, 4, 5,", "3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) assert isinstance(des_mat, np.ndarray) assert des_mat.shape", "3.99, 0.853], [6., 1.05, 1.949, 3.34, 0.61], [6., 1.07, 2.251,", "3.6, 0.766], [1., 1.08, 2.336, 3.51, 0.726], [1., 1.11, 3.211,", "[1., 1.11, 3.211, 3.98, 1.209], [1., 1.16, 3.037, 3.62, 0.75],", "4]) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:,", "4.38, 1.197], [2., 1.15, 3.018, 4.65, 1.244], [2., 1.17, 3.383,", "1.381], [5., 1.05, 2.33, 4.42, 1.242], [5., 0.99, 2.079, 3.47,", "0.76], [1., 1.19, 2.928, 3.75, 0.821], [1., 1.09, 2.865, 3.93,", "[5., 1.05, 2.33, 4.42, 1.242], [5., 0.99, 2.079, 3.47, 0.673],", "1.11, 3.027, 3.6, 0.766], [1., 1.08, 2.336, 3.51, 0.726], [1.,", "3.98, 1.209], [1., 1.16, 3.037, 3.62, 0.75], [2., 1.05, 2.074,", "3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:,", "= _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:, 4], group=dat[:,", "0.99, 2.199, 3.27, 0.693], [4., 1.11, 3.318, 3.95, 1.085], [4.,", "1.242], [5., 0.99, 2.079, 3.47, 0.673], [5., 1.22, 3.366, 4.41,", "[4., 1.14, 3.001, 4.05, 1.023], [4., 1.01, 2.439, 3.92, 1.067],", "as pd from hypothetical._lib import _build_des_mat def test_array(): d =", "group=pd.DataFrame(dat[:, 0])) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3],", "2.336, 3.51, 0.726], [1., 1.11, 3.211, 3.98, 1.209], [1., 1.16,", "hypothetical._lib import _build_des_mat def test_array(): d = np.array([[1., 1.11, 2.569,", "isinstance(des_mat, np.ndarray) assert des_mat_group_df.shape == dat.shape def test_build_matrix(): arr1 =", "1.2, 3.085, 4.78, 1.571], [3., 1.2, 3.308, 4.57, 1.506], [3.,", "1], dat[:, 2], dat[:, 3], dat[:, 4], group=dat[:, 0]) des_mat_df", "1.11, 3.318, 3.95, 1.085], [4., 1.2, 3.601, 4.27, 1.242], [4.,", "3], 
dat[:, 4], group=dat[:, 0]) des_mat_df = _build_des_mat(dat_df[1], dat_df[2], dat_df[3],", "1.05, 2.33, 4.42, 1.242], [5., 0.99, 2.079, 3.47, 0.673], [5.,", "1.325], [6., 1.11, 2.813, 3.76, 0.8], [6., 0.75, 0.84, 3.14,", "_build_des_mat(dat_df[1], dat_df[2], dat_df[3], dat_df[4], group=dat_df[0]) des_mat_no_group = _build_des_mat(dat[:, 1], dat[:,", "3.366, 4.41, 1.137], [5., 1.05, 2.416, 4.64, 1.455], [5., 1.13,", "_build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:, 0]))", "assert des_mat.shape == dat.shape assert isinstance(des_mat_df, np.ndarray) assert des_mat_df.shape ==", "1.458], [4., 1.22, 2.838, 3.89, 0.944], [4., 1.03, 2.351, 4.05,", "_build_des_mat def test_array(): d = np.array([[1., 1.11, 2.569, 3.58, 0.76],", "4.41, 1.137], [5., 1.05, 2.416, 4.64, 1.455], [5., 1.13, 3.1,", "1.01, 2.439, 3.92, 1.067], [4., 0.99, 2.199, 3.27, 0.693], [4.,", "4.57, 1.325], [6., 1.11, 2.813, 3.76, 0.8], [6., 0.75, 0.84,", "3.47, 0.673], [5., 1.22, 3.366, 4.41, 1.137], [5., 1.05, 2.416,", "0.952]]) return d def test_build_design_matrix(): dat = test_array() dat_df =", "1.19, 3.447, 4.4, 1.026], [3., 1.07, 2.505, 3.76, 0.912], [3.,", "des_mat = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:, 4],", "3.34, 0.61], [6., 1.07, 2.251, 3.21, 0.562], [6., 1.13, 3.064,", "2.469, 3.95, 0.952]]) return d def test_build_design_matrix(): dat = test_array()", "0.928], [1., 1.25, 3.844, 3.94, 1.009], [1., 1.11, 3.027, 3.6,", "2.885, 4.06, 1.094], [2., 1.11, 3.378, 4.87, 1.635], [2., 1.25,", "[5., 0.99, 2.079, 3.47, 0.673], [5., 1.22, 3.366, 4.41, 1.137],", "des_mat_group_df = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:, 4],", "0.61], [6., 1.07, 2.251, 3.21, 0.562], [6., 1.13, 3.064, 3.63,", "1.05, 2.416, 4.64, 1.455], [5., 1.13, 3.1, 4.57, 1.325], [6.,", "0.79], [6., 1.02, 2.132, 3.99, 0.853], [6., 1.05, 1.949, 3.34,", "1.151], [5., 1.14, 3.083, 4.79, 1.381], [5., 1.05, 2.33, 4.42,", "1.03, 2.351, 4.05, 1.241], [4., 1.14, 3.001, 4.05, 
1.023], [4.,", "[4., 0.99, 2.199, 3.27, 0.693], [4., 1.11, 3.318, 3.95, 1.085],", "4.65, 1.244], [2., 1.17, 3.383, 4.69, 1.495], [2., 1.19, 3.447,", "arr1 = [4, 4, 5, 5, 3, 2, 5] arr2", "1.11, 2.569, 3.58, 0.76], [1., 1.19, 2.928, 3.75, 0.821], [1.,", "4.87, 1.635], [2., 1.25, 3.906, 4.98, 1.517], [2., 1.17, 2.782,", "return d def test_build_design_matrix(): dat = test_array() dat_df = pd.DataFrame(dat)", "isinstance(des_mat_no_group, np.ndarray) assert des_mat_no_group.shape[1] == 2 assert isinstance(des_mat, np.ndarray) assert", "dat[:, 2], dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) assert isinstance(des_mat,", "2.782, 4.38, 1.197], [2., 1.15, 3.018, 4.65, 1.244], [2., 1.17,", "[5., 1.15, 2.552, 4.16, 1.151], [5., 1.14, 3.083, 4.79, 1.381],", "dat_df = pd.DataFrame(dat) des_mat = _build_des_mat(dat[:, 1], dat[:, 2], dat[:,", "1.242], [4., 1.08, 3.291, 3.85, 1.017], [5., 0.91, 1.532, 4.04,", "group=pd.DataFrame(dat[:, 0])) assert isinstance(des_mat, np.ndarray) assert des_mat.shape == dat.shape assert", "5, 3, 2, 5] arr2 = [2, 3, 3, 3,", "2.079, 3.47, 0.673], [5., 1.22, 3.366, 4.41, 1.137], [5., 1.05,", "3.95, 0.952]]) return d def test_build_design_matrix(): dat = test_array() dat_df", "3, 2, 5] arr2 = [2, 3, 3, 3, 3,", "== dat.shape assert isinstance(des_mat_df, np.ndarray) assert des_mat_df.shape == dat.shape assert", "_build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:, 4]) des_mat_group_df =", "[3., 1.2, 3.308, 4.57, 1.506], [3., 1.17, 3.231, 4.56, 1.458],", "0.99, 2.079, 3.47, 0.673], [5., 1.22, 3.366, 4.41, 1.137], [5.,", "1.2, 3.601, 4.27, 1.242], [4., 1.08, 3.291, 3.85, 1.017], [5.,", "[2., 1.19, 3.447, 4.4, 1.026], [3., 1.07, 2.505, 3.76, 0.912],", "2.416, 4.64, 1.455], [5., 1.13, 3.1, 4.57, 1.325], [6., 1.11,", "np.array([[1., 1.11, 2.569, 3.58, 0.76], [1., 1.19, 2.928, 3.75, 0.821],", "3.231, 4.56, 1.458], [4., 1.22, 2.838, 3.89, 0.944], [4., 1.03,", "np.ndarray) assert des_mat_no_group.shape[1] == 2 assert isinstance(des_mat, np.ndarray) 
assert des_mat_group_df.shape", "0.99, 2.315, 4.44, 1.398], [3., 1.06, 2.667, 4.38, 1.197], [3.,", "test_array(): d = np.array([[1., 1.11, 2.569, 3.58, 0.76], [1., 1.19,", "0])) assert isinstance(des_mat, np.ndarray) assert des_mat.shape == dat.shape assert isinstance(des_mat_df,", "5] arr2 = [2, 3, 3, 3, 3, 3, 3]", "1.08, 3.291, 3.85, 1.017], [5., 0.91, 1.532, 4.04, 1.084], [5.,", "des_mat_df = _build_des_mat(dat_df[1], dat_df[2], dat_df[3], dat_df[4], group=dat_df[0]) des_mat_no_group = _build_des_mat(dat[:,", "4.4, 1.026], [3., 1.07, 2.505, 3.76, 0.912], [3., 0.99, 2.315,", "1.25, 3.844, 3.94, 1.009], [1., 1.11, 3.027, 3.6, 0.766], [1.,", "dat.shape assert isinstance(des_mat_df, np.ndarray) assert des_mat_df.shape == dat.shape assert isinstance(des_mat_no_group,", "1.084], [5., 1.15, 2.552, 4.16, 1.151], [5., 1.14, 3.083, 4.79,", "3.318, 3.95, 1.085], [4., 1.2, 3.601, 4.27, 1.242], [4., 1.08,", "1.036], [2., 1.17, 2.885, 4.06, 1.094], [2., 1.11, 3.378, 4.87,", "2.569, 3.58, 0.76], [1., 1.19, 2.928, 3.75, 0.821], [1., 1.09,", "2.928, 3.75, 0.821], [1., 1.09, 2.865, 3.93, 0.928], [1., 1.25,", "[3., 0.99, 2.315, 4.44, 1.398], [3., 1.06, 2.667, 4.38, 1.197],", "= _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:,", "1.05, 2.074, 4.09, 1.036], [2., 1.17, 2.885, 4.06, 1.094], [2.,", "0.821], [1., 1.09, 2.865, 3.93, 0.928], [1., 1.25, 3.844, 3.94,", "3.001, 4.05, 1.023], [4., 1.01, 2.439, 3.92, 1.067], [4., 0.99,", "3.62, 0.75], [2., 1.05, 2.074, 4.09, 1.036], [2., 1.17, 2.885,", "2.813, 3.76, 0.8], [6., 0.75, 0.84, 3.14, 0.606], [6., 1.05,", "0.693], [4., 1.11, 3.318, 3.95, 1.085], [4., 1.2, 3.601, 4.27,", "dat[:, 3], dat[:, 4], group=dat[:, 0]) des_mat_df = _build_des_mat(dat_df[1], dat_df[2],", "dat = test_array() dat_df = pd.DataFrame(dat) des_mat = _build_des_mat(dat[:, 1],", "1.085], [4., 1.2, 3.601, 4.27, 1.242], [4., 1.08, 3.291, 3.85,", "0.912], [3., 0.99, 2.315, 4.44, 1.398], [3., 1.06, 2.667, 4.38,", "1.15, 2.552, 4.16, 
1.151], [5., 1.14, 3.083, 4.79, 1.381], [5.,", "d = np.array([[1., 1.11, 2.569, 3.58, 0.76], [1., 1.19, 2.928,", "4.79, 1.381], [5., 1.05, 2.33, 4.42, 1.242], [5., 0.99, 2.079,", "2], dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) des_mat_group_df = _build_des_mat(dat[:,", "3.083, 4.79, 1.381], [5., 1.05, 2.33, 4.42, 1.242], [5., 0.99,", "1.15, 3.021, 4.48, 1.476], [3., 1.2, 3.085, 4.78, 1.571], [3.,", "1.613], [3., 1.15, 3.021, 4.48, 1.476], [3., 1.2, 3.085, 4.78,", "1.197], [3., 1.02, 2.39, 4.67, 1.613], [3., 1.15, 3.021, 4.48,", "0.75, 0.84, 3.14, 0.606], [6., 1.05, 2.199, 3.75, 0.79], [6.,", "2.074, 4.09, 1.036], [2., 1.17, 2.885, 4.06, 1.094], [2., 1.11,", "des_mat_no_group.shape[1] == 2 assert isinstance(des_mat, np.ndarray) assert des_mat_group_df.shape == dat.shape", "3.378, 4.87, 1.635], [2., 1.25, 3.906, 4.98, 1.517], [2., 1.17,", "[6., 1.05, 1.949, 3.34, 0.61], [6., 1.07, 2.251, 3.21, 0.562],", "[1., 1.08, 2.336, 3.51, 0.726], [1., 1.11, 3.211, 3.98, 1.209],", "4.64, 1.455], [5., 1.13, 3.1, 4.57, 1.325], [6., 1.11, 2.813,", "1], dat[:, 2], dat[:, 3], dat[:, 4]) des_mat_group_df = _build_des_mat(dat[:,", "3.75, 0.79], [6., 1.02, 2.132, 3.99, 0.853], [6., 1.05, 1.949,", "1.16, 3.037, 3.62, 0.75], [2., 1.05, 2.074, 4.09, 1.036], [2.,", "[4., 1.22, 2.838, 3.89, 0.944], [4., 1.03, 2.351, 4.05, 1.241],", "import _build_des_mat def test_array(): d = np.array([[1., 1.11, 2.569, 3.58,", "1.06, 2.667, 4.38, 1.197], [3., 1.02, 2.39, 4.67, 1.613], [3.,", "== dat.shape def test_build_matrix(): arr1 = [4, 4, 5, 5,", "2.132, 3.99, 0.853], [6., 1.05, 1.949, 3.34, 0.61], [6., 1.07,", "[6., 1.07, 2.251, 3.21, 0.562], [6., 1.13, 3.064, 3.63, 0.707],", "1.532, 4.04, 1.084], [5., 1.15, 2.552, 4.16, 1.151], [5., 1.14,", "1.13, 3.1, 4.57, 1.325], [6., 1.11, 2.813, 3.76, 0.8], [6.,", "d def test_build_design_matrix(): dat = test_array() dat_df = pd.DataFrame(dat) des_mat", "1], dat[:, 2], dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) assert", "2.552, 4.16, 1.151], 
[5., 1.14, 3.083, 4.79, 1.381], [5., 1.05,", "4.04, 1.084], [5., 1.15, 2.552, 4.16, 1.151], [5., 1.14, 3.083,", "_build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3], dat[:, 4], group=dat[:, 0])", "0.8], [6., 0.75, 0.84, 3.14, 0.606], [6., 1.05, 2.199, 3.75,", "import pytest import numpy as np import pandas as pd", "1.023], [4., 1.01, 2.439, 3.92, 1.067], [4., 0.99, 2.199, 3.27,", "dat[:, 4]) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3],", "3.027, 3.6, 0.766], [1., 1.08, 2.336, 3.51, 0.726], [1., 1.11,", "dat[:, 3], dat[:, 4], group=pd.DataFrame(dat[:, 0])) des_mat_group_df = _build_des_mat(dat[:, 1],", "dat[:, 3], dat[:, 4]) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:, 2],", "1.11, 2.813, 3.76, 0.8], [6., 0.75, 0.84, 3.14, 0.606], [6.,", "0]) des_mat_df = _build_des_mat(dat_df[1], dat_df[2], dat_df[3], dat_df[4], group=dat_df[0]) des_mat_no_group =", "4.06, 1.094], [2., 1.11, 3.378, 4.87, 1.635], [2., 1.25, 3.906,", "4.98, 1.517], [2., 1.17, 2.782, 4.38, 1.197], [2., 1.15, 3.018,", "4.56, 1.458], [4., 1.22, 2.838, 3.89, 0.944], [4., 1.03, 2.351,", "1.2, 3.308, 4.57, 1.506], [3., 1.17, 3.231, 4.56, 1.458], [4.,", "[3., 1.2, 3.085, 4.78, 1.571], [3., 1.2, 3.308, 4.57, 1.506],", "== dat.shape assert isinstance(des_mat_no_group, np.ndarray) assert des_mat_no_group.shape[1] == 2 assert", "assert isinstance(des_mat, np.ndarray) assert des_mat_group_df.shape == dat.shape def test_build_matrix(): arr1", "dat[:, 2], dat[:, 3], dat[:, 4], group=dat[:, 0]) des_mat_df =", "1.17, 3.383, 4.69, 1.495], [2., 1.19, 3.447, 4.4, 1.026], [3.,", "[3., 1.02, 2.39, 4.67, 1.613], [3., 1.15, 3.021, 4.48, 1.476],", "1.13, 3.064, 3.63, 0.707], [6., 1.11, 2.469, 3.95, 0.952]]) return", "dat_df[4], group=dat_df[0]) des_mat_no_group = _build_des_mat(dat[:, 1], dat[:, 2], dat[:, 3],", "3.844, 3.94, 1.009], [1., 1.11, 3.027, 3.6, 0.766], [1., 1.08,", "0.707], [6., 1.11, 2.469, 3.95, 0.952]]) return d def test_build_design_matrix():", "1.209], [1., 1.16, 3.037, 3.62, 0.75], 
[2., 1.05, 2.074, 4.09,", "1.08, 2.336, 3.51, 0.726], [1., 1.11, 3.211, 3.98, 1.209], [1.,", "import pandas as pd from hypothetical._lib import _build_des_mat def test_array():", "1.17, 2.885, 4.06, 1.094], [2., 1.11, 3.378, 4.87, 1.635], [2.,", "dat[:, 2], dat[:, 3], dat[:, 4]) des_mat_group_df = _build_des_mat(dat[:, 1],", "2, 5] arr2 = [2, 3, 3, 3, 3, 3,", "dat[:, 4], group=pd.DataFrame(dat[:, 0])) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:, 2],", "3.75, 0.821], [1., 1.09, 2.865, 3.93, 0.928], [1., 1.25, 3.844,", "test_build_design_matrix(): dat = test_array() dat_df = pd.DataFrame(dat) des_mat = _build_des_mat(dat[:,", "1.14, 3.083, 4.79, 1.381], [5., 1.05, 2.33, 4.42, 1.242], [5.,", "1.517], [2., 1.17, 2.782, 4.38, 1.197], [2., 1.15, 3.018, 4.65,", "4.44, 1.398], [3., 1.06, 2.667, 4.38, 1.197], [3., 1.02, 2.39,", "[4., 1.01, 2.439, 3.92, 1.067], [4., 0.99, 2.199, 3.27, 0.693],", "1.02, 2.132, 3.99, 0.853], [6., 1.05, 1.949, 3.34, 0.61], [6.,", "2.865, 3.93, 0.928], [1., 1.25, 3.844, 3.94, 1.009], [1., 1.11,", "3.601, 4.27, 1.242], [4., 1.08, 3.291, 3.85, 1.017], [5., 0.91,", "3.21, 0.562], [6., 1.13, 3.064, 3.63, 0.707], [6., 1.11, 2.469,", "[4., 1.08, 3.291, 3.85, 1.017], [5., 0.91, 1.532, 4.04, 1.084],", "def test_build_design_matrix(): dat = test_array() dat_df = pd.DataFrame(dat) des_mat =", "2], dat[:, 3], dat[:, 4]) des_mat_group_df = _build_des_mat(dat[:, 1], dat[:,", "dat.shape assert isinstance(des_mat_no_group, np.ndarray) assert des_mat_no_group.shape[1] == 2 assert isinstance(des_mat,", "[2., 1.17, 2.885, 4.06, 1.094], [2., 1.11, 3.378, 4.87, 1.635],", "4.57, 1.506], [3., 1.17, 3.231, 4.56, 1.458], [4., 1.22, 2.838,", "5, 5, 3, 2, 5] arr2 = [2, 3, 3,", "[2., 1.05, 2.074, 4.09, 1.036], [2., 1.17, 2.885, 4.06, 1.094],", "2.315, 4.44, 1.398], [3., 1.06, 2.667, 4.38, 1.197], [3., 1.02,", "assert isinstance(des_mat_df, np.ndarray) assert des_mat_df.shape == dat.shape assert isinstance(des_mat_no_group, np.ndarray)", "3.89, 0.944], [4., 1.03, 
2.351, 4.05, 1.241], [4., 1.14, 3.001,", "1.571], [3., 1.2, 3.308, 4.57, 1.506], [3., 1.17, 3.231, 4.56,", "3.906, 4.98, 1.517], [2., 1.17, 2.782, 4.38, 1.197], [2., 1.15,", "1.11, 3.211, 3.98, 1.209], [1., 1.16, 3.037, 3.62, 0.75], [2.,", "1.244], [2., 1.17, 3.383, 4.69, 1.495], [2., 1.19, 3.447, 4.4,", "3.064, 3.63, 0.707], [6., 1.11, 2.469, 3.95, 0.952]]) return d", "1.137], [5., 1.05, 2.416, 4.64, 1.455], [5., 1.13, 3.1, 4.57,", "pytest import numpy as np import pandas as pd from", "[6., 1.11, 2.469, 3.95, 0.952]]) return d def test_build_design_matrix(): dat", "<reponame>aschleg/hypy<filename>tests/test_internal.py import pytest import numpy as np import pandas as" ]
[ "url = \"https://docs.google.com/uc?export=download\" session = requests.Session() response = session.get(url, params={'id':", "sys import requests import os import time import urllib.request import", "license. # For a copy, see <https://opensource.org/licenses/MIT>. \"\"\"Download big files", "# https://stackoverflow.com/a/39225039/5308925 def save_response_content(response, destination): chunk_size = 32768 written_size =", "/ (1024 * duration)) percent = int(count * block_size *", "print('Now unzipping...Wait for 2 minutes ish...!') return 0 if __name__", "(destination, sizeof_fmt(progress)) empty_space = shutil.get_terminal_size((80, 20)).columns - len(message) sys.stdout.write('\\r' +", "f.write(chunk) written_size += chunk_size print_status(destination, written_size) print('Done.') def get_confirm_token(response): for", "= session.get(url, params={'id': id}, stream=True) token = get_confirm_token(response) if token:", "params = {'id': id, 'confirm': token} response = session.get(url, params=params,", "\"%3.2f%s%s\" % (num, unit, suffix) num /= 1000.0 return \"%.2f%s%s\"", "filter out keep-alive new chunks f.write(chunk) written_size += chunk_size print_status(destination,", "open(destination, \"wb\") as f: for chunk in response.iter_content(chunk_size): if chunk:", "= int(progress_size / (1024 * duration)) percent = int(count *", "unit, suffix) num /= 1000.0 return \"%.2f%s%s\" % (num, 'Yi',", "model model_path = './cls_model.pth' if os.path.isfile(model_path): print('Model file already downloaded", "progress_size / (1024 * 1024), speed, duration)) sys.stdout.flush() def sizeof_fmt(num,", "shutil.get_terminal_size((80, 20)).columns - len(message) sys.stdout.write('\\r' + message + empty_space *", "seconds passed\" % (percent, progress_size / (1024 * 1024), speed,", "%s... 
%s\" % (destination, sizeof_fmt(progress)) empty_space = shutil.get_terminal_size((80, 20)).columns -", "new chunks f.write(chunk) written_size += chunk_size print_status(destination, written_size) print('Done.') def", "20)).columns - len(message) sys.stdout.write('\\r' + message + empty_space * '", "if key.startswith('download_warning'): return value return None url = \"https://docs.google.com/uc?export=download\" session", "else: dataset_url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' urllib.request.urlretrieve(dataset_url, os.path.basename(dataset_url), reporthook) # unzip dataset", "- start_time progress_size = int(count * block_size) speed = int(progress_size", "Universitat Autonoma de # Barcelona (UAB). # # This work", "= 32768 written_size = 0 with open(destination, \"wb\") as f:", "save_response_content(response, destination) def download_contents(): # download model model_path = './cls_model.pth'", "download_contents(): # download model model_path = './cls_model.pth' if os.path.isfile(model_path): print('Model", "(c) 2017 Computer Vision Center (CVC) at the Universitat Autonoma", "zip_ref.extractall('.') zip_ref.close() print('Now unzipping...Wait for 2 minutes ish...!') return 0", "already downloaded in', dataset_path) else: dataset_url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' urllib.request.urlretrieve(dataset_url, os.path.basename(dataset_url),", "(percent, progress_size / (1024 * 1024), speed, duration)) sys.stdout.flush() def", "Computer Vision Center (CVC) at the Universitat Autonoma de #", "in response.cookies.items(): if key.startswith('download_warning'): return value return None url =", "= {'id': id, 'confirm': token} response = session.get(url, params=params, stream=True)", "0 with open(destination, \"wb\") as f: for chunk in response.iter_content(chunk_size):", "# download dataset dataset_path = 
'./shapenetcore_partanno_segmentation_benchmark_v0.zip' if os.path.isfile(dataset_path): print('Dataset file", "if os.path.isfile(dataset_path): print('Dataset file already downloaded in', dataset_path) else: dataset_url", "* 1024), speed, duration)) sys.stdout.flush() def sizeof_fmt(num, suffix='B'): # https://stackoverflow.com/a/1094933/5308925", "= zipfile.ZipFile(os.path.basename(dataset_url), 'r') zip_ref.extractall('.') zip_ref.close() print('Now unzipping...Wait for 2 minutes", "import zipfile def reporthook(count, block_size, total_size): global start_time if count", "int(progress_size / (1024 * duration)) percent = int(count * block_size", "downloaded in', model_path) else: download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5', './cls_model.pth') # download dataset dataset_path", "< 1000.0: return \"%3.2f%s%s\" % (num, unit, suffix) num /=", "download_file_from_google_drive(id, destination): # https://stackoverflow.com/a/39225039/5308925 def save_response_content(response, destination): chunk_size = 32768", "= session.get(url, params=params, stream=True) save_response_content(response, destination) def download_contents(): # download", "time.time() return duration = time.time() - start_time progress_size = int(count", "(1024 * 1024), speed, duration)) sys.stdout.flush() def sizeof_fmt(num, suffix='B'): #", "= shutil.get_terminal_size((80, 20)).columns - len(message) sys.stdout.write('\\r' + message + empty_space", "= get_confirm_token(response) if token: params = {'id': id, 'confirm': token}", "dataset_url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' urllib.request.urlretrieve(dataset_url, os.path.basename(dataset_url), reporthook) # unzip dataset zip_ref", "file already downloaded in', model_path) else: download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5', './cls_model.pth') # download", "model_path = './cls_model.pth' if os.path.isfile(model_path): print('Model file already 
downloaded in',", "params=params, stream=True) save_response_content(response, destination) def download_contents(): # download model model_path", "https://stackoverflow.com/a/1094933/5308925 for unit in ['','K','M','G','T','P','E','Z']: if abs(num) < 1000.0: return", "download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5', './cls_model.pth') # download dataset dataset_path = './shapenetcore_partanno_segmentation_benchmark_v0.zip' if os.path.isfile(dataset_path):", "if token: params = {'id': id, 'confirm': token} response =", "import requests import os import time import urllib.request import zipfile", "suffix) num /= 1000.0 return \"%.2f%s%s\" % (num, 'Yi', suffix)", "python3 # Copyright (c) 2017 Computer Vision Center (CVC) at", "['','K','M','G','T','P','E','Z']: if abs(num) < 1000.0: return \"%3.2f%s%s\" % (num, unit,", "0: sys.stdout.write(\"\\r...%d%%, %d MB, %d KB/s, %d seconds passed\" %", "chunk: # filter out keep-alive new chunks f.write(chunk) written_size +=", "- len(message) sys.stdout.write('\\r' + message + empty_space * ' ')", "% 5 == 0: sys.stdout.write(\"\\r...%d%%, %d MB, %d KB/s, %d", "session.get(url, params=params, stream=True) save_response_content(response, destination) def download_contents(): # download model", "# download model model_path = './cls_model.pth' if os.path.isfile(model_path): print('Model file", "response.iter_content(chunk_size): if chunk: # filter out keep-alive new chunks f.write(chunk)", "in', dataset_path) else: dataset_url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' urllib.request.urlretrieve(dataset_url, os.path.basename(dataset_url), reporthook) #", "%s\" % (destination, sizeof_fmt(progress)) empty_space = shutil.get_terminal_size((80, 20)).columns - len(message)", "# Barcelona (UAB). 
# # This work is licensed under", "'r') zip_ref.extractall('.') zip_ref.close() print('Now unzipping...Wait for 2 minutes ish...!') return", "* block_size) speed = int(progress_size / (1024 * duration)) percent", "') sys.stdout.flush() def download_file_from_google_drive(id, destination): # https://stackoverflow.com/a/39225039/5308925 def save_response_content(response, destination):", "if os.path.isfile(model_path): print('Model file already downloaded in', model_path) else: download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5',", "destination) def download_contents(): # download model model_path = './cls_model.pth' if", "time import urllib.request import zipfile def reporthook(count, block_size, total_size): global", "<https://opensource.org/licenses/MIT>. \"\"\"Download big files from Google Drive.\"\"\" import shutil import", "Barcelona (UAB). # # This work is licensed under the", "reporthook) # unzip dataset zip_ref = zipfile.ZipFile(os.path.basename(dataset_url), 'r') zip_ref.extractall('.') zip_ref.close()", "of the MIT license. 
# For a copy, see <https://opensource.org/licenses/MIT>.", "+ message + empty_space * ' ') sys.stdout.flush() def download_file_from_google_drive(id,", "%d seconds passed\" % (percent, progress_size / (1024 * 1024),", "'./shapenetcore_partanno_segmentation_benchmark_v0.zip' if os.path.isfile(dataset_path): print('Dataset file already downloaded in', dataset_path) else:", "dataset_path = './shapenetcore_partanno_segmentation_benchmark_v0.zip' if os.path.isfile(dataset_path): print('Dataset file already downloaded in',", "f: for chunk in response.iter_content(chunk_size): if chunk: # filter out", "def download_contents(): # download model model_path = './cls_model.pth' if os.path.isfile(model_path):", "if count == 0: start_time = time.time() return duration =", "message + empty_space * ' ') sys.stdout.flush() def download_file_from_google_drive(id, destination):", "start_time = time.time() return duration = time.time() - start_time progress_size", "urllib.request import zipfile def reporthook(count, block_size, total_size): global start_time if", "abs(num) < 1000.0: return \"%3.2f%s%s\" % (num, unit, suffix) num", "% (num, 'Yi', suffix) def print_status(destination, progress): message = \"Downloading", "# https://stackoverflow.com/a/1094933/5308925 for unit in ['','K','M','G','T','P','E','Z']: if abs(num) < 1000.0:", "return \"%.2f%s%s\" % (num, 'Yi', suffix) def print_status(destination, progress): message", "terms of the MIT license. 
# For a copy, see", "in ['','K','M','G','T','P','E','Z']: if abs(num) < 1000.0: return \"%3.2f%s%s\" % (num,", "model_path) else: download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5', './cls_model.pth') # download dataset dataset_path = './shapenetcore_partanno_segmentation_benchmark_v0.zip'", "sizeof_fmt(num, suffix='B'): # https://stackoverflow.com/a/1094933/5308925 for unit in ['','K','M','G','T','P','E','Z']: if abs(num)", "/ (1024 * 1024), speed, duration)) sys.stdout.flush() def sizeof_fmt(num, suffix='B'):", "None url = \"https://docs.google.com/uc?export=download\" session = requests.Session() response = session.get(url,", "def get_confirm_token(response): for key, value in response.cookies.items(): if key.startswith('download_warning'): return", "shutil import sys import requests import os import time import", "urllib.request.urlretrieve(dataset_url, os.path.basename(dataset_url), reporthook) # unzip dataset zip_ref = zipfile.ZipFile(os.path.basename(dataset_url), 'r')", "sizeof_fmt(progress)) empty_space = shutil.get_terminal_size((80, 20)).columns - len(message) sys.stdout.write('\\r' + message", "zip_ref.close() print('Now unzipping...Wait for 2 minutes ish...!') return 0 if", "from Google Drive.\"\"\" import shutil import sys import requests import", "the Universitat Autonoma de # Barcelona (UAB). # # This", "'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' urllib.request.urlretrieve(dataset_url, os.path.basename(dataset_url), reporthook) # unzip dataset zip_ref = zipfile.ZipFile(os.path.basename(dataset_url),", "get_confirm_token(response): for key, value in response.cookies.items(): if key.startswith('download_warning'): return value", "id, 'confirm': token} response = session.get(url, params=params, stream=True) save_response_content(response, destination)", "at the Universitat Autonoma de # Barcelona (UAB). 
# #", "suffix='B'): # https://stackoverflow.com/a/1094933/5308925 for unit in ['','K','M','G','T','P','E','Z']: if abs(num) <", "return value return None url = \"https://docs.google.com/uc?export=download\" session = requests.Session()", "chunk in response.iter_content(chunk_size): if chunk: # filter out keep-alive new", "\"%.2f%s%s\" % (num, 'Yi', suffix) def print_status(destination, progress): message =", "dataset_path) else: dataset_url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' urllib.request.urlretrieve(dataset_url, os.path.basename(dataset_url), reporthook) # unzip", "value return None url = \"https://docs.google.com/uc?export=download\" session = requests.Session() response", "token: params = {'id': id, 'confirm': token} response = session.get(url,", "Vision Center (CVC) at the Universitat Autonoma de # Barcelona", "is licensed under the terms of the MIT license. #", "def print_status(destination, progress): message = \"Downloading %s... 
%s\" % (destination,", "+= chunk_size print_status(destination, written_size) print('Done.') def get_confirm_token(response): for key, value", "% (percent, progress_size / (1024 * 1024), speed, duration)) sys.stdout.flush()", "print('Model file already downloaded in', model_path) else: download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5', './cls_model.pth') #", "{'id': id, 'confirm': token} response = session.get(url, params=params, stream=True) save_response_content(response,", "def sizeof_fmt(num, suffix='B'): # https://stackoverflow.com/a/1094933/5308925 for unit in ['','K','M','G','T','P','E','Z']: if", "#!/usr/bin/env python3 # Copyright (c) 2017 Computer Vision Center (CVC)", "key, value in response.cookies.items(): if key.startswith('download_warning'): return value return None", "unzip dataset zip_ref = zipfile.ZipFile(os.path.basename(dataset_url), 'r') zip_ref.extractall('.') zip_ref.close() print('Now unzipping...Wait", "Center (CVC) at the Universitat Autonoma de # Barcelona (UAB).", "= time.time() - start_time progress_size = int(count * block_size) speed", "percent % 5 == 0: sys.stdout.write(\"\\r...%d%%, %d MB, %d KB/s,", "(UAB). 
# # This work is licensed under the terms", "\"\"\"Download big files from Google Drive.\"\"\" import shutil import sys", "files from Google Drive.\"\"\" import shutil import sys import requests", "num /= 1000.0 return \"%.2f%s%s\" % (num, 'Yi', suffix) def", "import urllib.request import zipfile def reporthook(count, block_size, total_size): global start_time", "= 0 with open(destination, \"wb\") as f: for chunk in", "get_confirm_token(response) if token: params = {'id': id, 'confirm': token} response", "response.cookies.items(): if key.startswith('download_warning'): return value return None url = \"https://docs.google.com/uc?export=download\"", "duration)) sys.stdout.flush() def sizeof_fmt(num, suffix='B'): # https://stackoverflow.com/a/1094933/5308925 for unit in", "sys.stdout.flush() def sizeof_fmt(num, suffix='B'): # https://stackoverflow.com/a/1094933/5308925 for unit in ['','K','M','G','T','P','E','Z']:", "+ empty_space * ' ') sys.stdout.flush() def download_file_from_google_drive(id, destination): #", "import sys import requests import os import time import urllib.request", "in response.iter_content(chunk_size): if chunk: # filter out keep-alive new chunks", "Copyright (c) 2017 Computer Vision Center (CVC) at the Universitat", "# Copyright (c) 2017 Computer Vision Center (CVC) at the", "' ') sys.stdout.flush() def download_file_from_google_drive(id, destination): # https://stackoverflow.com/a/39225039/5308925 def save_response_content(response,", "percent = int(count * block_size * 100 / total_size) if", "licensed under the terms of the MIT license. # For", "\"wb\") as f: for chunk in response.iter_content(chunk_size): if chunk: #", "token = get_confirm_token(response) if token: params = {'id': id, 'confirm':", "reporthook(count, block_size, total_size): global start_time if count == 0: start_time", "print_status(destination, progress): message = \"Downloading %s... 
%s\" % (destination, sizeof_fmt(progress))", "unzipping...Wait for 2 minutes ish...!') return 0 if __name__ ==", "dataset zip_ref = zipfile.ZipFile(os.path.basename(dataset_url), 'r') zip_ref.extractall('.') zip_ref.close() print('Now unzipping...Wait for", "(CVC) at the Universitat Autonoma de # Barcelona (UAB). #", "for key, value in response.cookies.items(): if key.startswith('download_warning'): return value return", "for 2 minutes ish...!') return 0 if __name__ == '__main__':", "= requests.Session() response = session.get(url, params={'id': id}, stream=True) token =", "downloaded in', dataset_path) else: dataset_url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' urllib.request.urlretrieve(dataset_url, os.path.basename(dataset_url), reporthook)", "\"https://docs.google.com/uc?export=download\" session = requests.Session() response = session.get(url, params={'id': id}, stream=True)", "'Yi', suffix) def print_status(destination, progress): message = \"Downloading %s... %s\"", "'confirm': token} response = session.get(url, params=params, stream=True) save_response_content(response, destination) def", "'./cls_model.pth' if os.path.isfile(model_path): print('Model file already downloaded in', model_path) else:", "count == 0: start_time = time.time() return duration = time.time()", "0: start_time = time.time() return duration = time.time() - start_time", "= \"Downloading %s... 
%s\" % (destination, sizeof_fmt(progress)) empty_space = shutil.get_terminal_size((80,", "100 / total_size) if percent % 5 == 0: sys.stdout.write(\"\\r...%d%%,", "<reponame>JamesWang007/Open3D-PointNet<gh_stars>100-1000 #!/usr/bin/env python3 # Copyright (c) 2017 Computer Vision Center", "time.time() - start_time progress_size = int(count * block_size) speed =", "len(message) sys.stdout.write('\\r' + message + empty_space * ' ') sys.stdout.flush()", "if abs(num) < 1000.0: return \"%3.2f%s%s\" % (num, unit, suffix)", "download dataset dataset_path = './shapenetcore_partanno_segmentation_benchmark_v0.zip' if os.path.isfile(dataset_path): print('Dataset file already", "(1024 * duration)) percent = int(count * block_size * 100", "return None url = \"https://docs.google.com/uc?export=download\" session = requests.Session() response =", "the terms of the MIT license. # For a copy,", "1000.0: return \"%3.2f%s%s\" % (num, unit, suffix) num /= 1000.0", "int(count * block_size) speed = int(progress_size / (1024 * duration))", "os.path.isfile(model_path): print('Model file already downloaded in', model_path) else: download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5', './cls_model.pth')", "Drive.\"\"\" import shutil import sys import requests import os import", "1000.0 return \"%.2f%s%s\" % (num, 'Yi', suffix) def print_status(destination, progress):", "for unit in ['','K','M','G','T','P','E','Z']: if abs(num) < 1000.0: return \"%3.2f%s%s\"", "def download_file_from_google_drive(id, destination): # https://stackoverflow.com/a/39225039/5308925 def save_response_content(response, destination): chunk_size =", "block_size) speed = int(progress_size / (1024 * duration)) percent =", "as f: for chunk in response.iter_content(chunk_size): if chunk: # filter", "# filter out keep-alive new chunks f.write(chunk) written_size += chunk_size", "total_size): global start_time if count == 0: start_time = time.time()", "print_status(destination, written_size) print('Done.') def 
get_confirm_token(response): for key, value in response.cookies.items():", "empty_space = shutil.get_terminal_size((80, 20)).columns - len(message) sys.stdout.write('\\r' + message +", "block_size, total_size): global start_time if count == 0: start_time =", "* duration)) percent = int(count * block_size * 100 /", "chunk_size print_status(destination, written_size) print('Done.') def get_confirm_token(response): for key, value in", "duration)) percent = int(count * block_size * 100 / total_size)", "file already downloaded in', dataset_path) else: dataset_url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' urllib.request.urlretrieve(dataset_url,", "download model model_path = './cls_model.pth' if os.path.isfile(model_path): print('Model file already", "see <https://opensource.org/licenses/MIT>. \"\"\"Download big files from Google Drive.\"\"\" import shutil", "response = session.get(url, params={'id': id}, stream=True) token = get_confirm_token(response) if", "written_size) print('Done.') def get_confirm_token(response): for key, value in response.cookies.items(): if", "start_time progress_size = int(count * block_size) speed = int(progress_size /", "requests import os import time import urllib.request import zipfile def", "message = \"Downloading %s... %s\" % (destination, sizeof_fmt(progress)) empty_space =", "os.path.isfile(dataset_path): print('Dataset file already downloaded in', dataset_path) else: dataset_url =", "destination): chunk_size = 32768 written_size = 0 with open(destination, \"wb\")", "1024), speed, duration)) sys.stdout.flush() def sizeof_fmt(num, suffix='B'): # https://stackoverflow.com/a/1094933/5308925 for", "big files from Google Drive.\"\"\" import shutil import sys import", "== 0: start_time = time.time() return duration = time.time() -", "MIT license. # For a copy, see <https://opensource.org/licenses/MIT>. 
\"\"\"Download big", "return duration = time.time() - start_time progress_size = int(count *", "= int(count * block_size) speed = int(progress_size / (1024 *", "* block_size * 100 / total_size) if percent % 5", "chunks f.write(chunk) written_size += chunk_size print_status(destination, written_size) print('Done.') def get_confirm_token(response):", "total_size) if percent % 5 == 0: sys.stdout.write(\"\\r...%d%%, %d MB,", "\"Downloading %s... %s\" % (destination, sizeof_fmt(progress)) empty_space = shutil.get_terminal_size((80, 20)).columns", "zip_ref = zipfile.ZipFile(os.path.basename(dataset_url), 'r') zip_ref.extractall('.') zip_ref.close() print('Now unzipping...Wait for 2", "def save_response_content(response, destination): chunk_size = 32768 written_size = 0 with", "print('Done.') def get_confirm_token(response): for key, value in response.cookies.items(): if key.startswith('download_warning'):", "if percent % 5 == 0: sys.stdout.write(\"\\r...%d%%, %d MB, %d", "* ' ') sys.stdout.flush() def download_file_from_google_drive(id, destination): # https://stackoverflow.com/a/39225039/5308925 def", "else: download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5', './cls_model.pth') # download dataset dataset_path = './shapenetcore_partanno_segmentation_benchmark_v0.zip' if", "2017 Computer Vision Center (CVC) at the Universitat Autonoma de", "(num, 'Yi', suffix) def print_status(destination, progress): message = \"Downloading %s...", "if chunk: # filter out keep-alive new chunks f.write(chunk) written_size", "copy, see <https://opensource.org/licenses/MIT>. \"\"\"Download big files from Google Drive.\"\"\" import", "Autonoma de # Barcelona (UAB). 
# # This work is", "KB/s, %d seconds passed\" % (percent, progress_size / (1024 *", "% (num, unit, suffix) num /= 1000.0 return \"%.2f%s%s\" %", "MB, %d KB/s, %d seconds passed\" % (percent, progress_size /", "# This work is licensed under the terms of the", "/ total_size) if percent % 5 == 0: sys.stdout.write(\"\\r...%d%%, %d", "speed = int(progress_size / (1024 * duration)) percent = int(count", "import os import time import urllib.request import zipfile def reporthook(count,", "written_size += chunk_size print_status(destination, written_size) print('Done.') def get_confirm_token(response): for key,", "os.path.basename(dataset_url), reporthook) # unzip dataset zip_ref = zipfile.ZipFile(os.path.basename(dataset_url), 'r') zip_ref.extractall('.')", "chunk_size = 32768 written_size = 0 with open(destination, \"wb\") as", "int(count * block_size * 100 / total_size) if percent %", "32768 written_size = 0 with open(destination, \"wb\") as f: for", "zipfile def reporthook(count, block_size, total_size): global start_time if count ==", "This work is licensed under the terms of the MIT", "under the terms of the MIT license. # For a", "return \"%3.2f%s%s\" % (num, unit, suffix) num /= 1000.0 return", "keep-alive new chunks f.write(chunk) written_size += chunk_size print_status(destination, written_size) print('Done.')", "token} response = session.get(url, params=params, stream=True) save_response_content(response, destination) def download_contents():", "value in response.cookies.items(): if key.startswith('download_warning'): return value return None url", "key.startswith('download_warning'): return value return None url = \"https://docs.google.com/uc?export=download\" session =", "the MIT license. # For a copy, see <https://opensource.org/licenses/MIT>. 
\"\"\"Download", "save_response_content(response, destination): chunk_size = 32768 written_size = 0 with open(destination,", "%d KB/s, %d seconds passed\" % (percent, progress_size / (1024", "%d MB, %d KB/s, %d seconds passed\" % (percent, progress_size", "de # Barcelona (UAB). # # This work is licensed", "Google Drive.\"\"\" import shutil import sys import requests import os", "import time import urllib.request import zipfile def reporthook(count, block_size, total_size):", "block_size * 100 / total_size) if percent % 5 ==", "in', model_path) else: download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5', './cls_model.pth') # download dataset dataset_path =", "= time.time() return duration = time.time() - start_time progress_size =", "= int(count * block_size * 100 / total_size) if percent", "sys.stdout.write(\"\\r...%d%%, %d MB, %d KB/s, %d seconds passed\" % (percent,", "already downloaded in', model_path) else: download_file_from_google_drive('1WWf5B5fmik5_P1dwxltJ-atRkYeCcCC5', './cls_model.pth') # download dataset", "zipfile.ZipFile(os.path.basename(dataset_url), 'r') zip_ref.extractall('.') zip_ref.close() print('Now unzipping...Wait for 2 minutes ish...!')", "speed, duration)) sys.stdout.flush() def sizeof_fmt(num, suffix='B'): # https://stackoverflow.com/a/1094933/5308925 for unit", "/= 1000.0 return \"%.2f%s%s\" % (num, 'Yi', suffix) def print_status(destination,", "% (destination, sizeof_fmt(progress)) empty_space = shutil.get_terminal_size((80, 20)).columns - len(message) sys.stdout.write('\\r'", "progress_size = int(count * block_size) speed = int(progress_size / (1024", "https://stackoverflow.com/a/39225039/5308925 def save_response_content(response, destination): chunk_size = 32768 written_size = 0", "5 == 0: sys.stdout.write(\"\\r...%d%%, %d MB, %d KB/s, %d seconds", "# unzip dataset zip_ref = zipfile.ZipFile(os.path.basename(dataset_url), 'r') zip_ref.extractall('.') zip_ref.close() print('Now", "'./cls_model.pth') # download dataset 
dataset_path = './shapenetcore_partanno_segmentation_benchmark_v0.zip' if os.path.isfile(dataset_path): print('Dataset", "def reporthook(count, block_size, total_size): global start_time if count == 0:", "dataset dataset_path = './shapenetcore_partanno_segmentation_benchmark_v0.zip' if os.path.isfile(dataset_path): print('Dataset file already downloaded", "suffix) def print_status(destination, progress): message = \"Downloading %s... %s\" %", "unit in ['','K','M','G','T','P','E','Z']: if abs(num) < 1000.0: return \"%3.2f%s%s\" %", "session = requests.Session() response = session.get(url, params={'id': id}, stream=True) token", "for chunk in response.iter_content(chunk_size): if chunk: # filter out keep-alive", "requests.Session() response = session.get(url, params={'id': id}, stream=True) token = get_confirm_token(response)", "= './shapenetcore_partanno_segmentation_benchmark_v0.zip' if os.path.isfile(dataset_path): print('Dataset file already downloaded in', dataset_path)", "with open(destination, \"wb\") as f: for chunk in response.iter_content(chunk_size): if", "session.get(url, params={'id': id}, stream=True) token = get_confirm_token(response) if token: params", "passed\" % (percent, progress_size / (1024 * 1024), speed, duration))", "duration = time.time() - start_time progress_size = int(count * block_size)", "2 minutes ish...!') return 0 if __name__ == '__main__': download_contents()", "work is licensed under the terms of the MIT license.", "response = session.get(url, params=params, stream=True) save_response_content(response, destination) def download_contents(): #", "# # This work is licensed under the terms of", "= \"https://docs.google.com/uc?export=download\" session = requests.Session() response = session.get(url, params={'id': id},", "written_size = 0 with open(destination, \"wb\") as f: for chunk", "stream=True) save_response_content(response, destination) def download_contents(): # download model model_path =", "out keep-alive new chunks 
f.write(chunk) written_size += chunk_size print_status(destination, written_size)", "sys.stdout.flush() def download_file_from_google_drive(id, destination): # https://stackoverflow.com/a/39225039/5308925 def save_response_content(response, destination): chunk_size", "progress): message = \"Downloading %s... %s\" % (destination, sizeof_fmt(progress)) empty_space", "a copy, see <https://opensource.org/licenses/MIT>. \"\"\"Download big files from Google Drive.\"\"\"", "params={'id': id}, stream=True) token = get_confirm_token(response) if token: params =", "empty_space * ' ') sys.stdout.flush() def download_file_from_google_drive(id, destination): # https://stackoverflow.com/a/39225039/5308925", "sys.stdout.write('\\r' + message + empty_space * ' ') sys.stdout.flush() def", "global start_time if count == 0: start_time = time.time() return", "start_time if count == 0: start_time = time.time() return duration", "destination): # https://stackoverflow.com/a/39225039/5308925 def save_response_content(response, destination): chunk_size = 32768 written_size", "import shutil import sys import requests import os import time", "* 100 / total_size) if percent % 5 == 0:", "= 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip' urllib.request.urlretrieve(dataset_url, os.path.basename(dataset_url), reporthook) # unzip dataset zip_ref =", "print('Dataset file already downloaded in', dataset_path) else: dataset_url = 'https://shapenet.cs.stanford.edu/ericyi/shapenetcore_partanno_segmentation_benchmark_v0.zip'", "= './cls_model.pth' if os.path.isfile(model_path): print('Model file already downloaded in', model_path)", "# For a copy, see <https://opensource.org/licenses/MIT>. \"\"\"Download big files from", "For a copy, see <https://opensource.org/licenses/MIT>. 
\"\"\"Download big files from Google", "stream=True) token = get_confirm_token(response) if token: params = {'id': id,", "(num, unit, suffix) num /= 1000.0 return \"%.2f%s%s\" % (num,", "os import time import urllib.request import zipfile def reporthook(count, block_size,", "== 0: sys.stdout.write(\"\\r...%d%%, %d MB, %d KB/s, %d seconds passed\"", "id}, stream=True) token = get_confirm_token(response) if token: params = {'id':" ]
[ "button1 = tkinter.Button(panel, text='下载', command=download) button1.pack(side='left') button2 = tkinter.Button(panel, text='关于',", "\"\"\" import time import tkinter import tkinter.messagebox def download(): #", "tkinter.Tk() top.title('单线程') top.geometry('200x150') top.wm_attributes('-topmost', True) panel = tkinter.Frame(top) button1 =", "command=download) button1.pack(side='left') button2 = tkinter.Button(panel, text='关于', command=show_about) button2.pack(side='right') panel.pack(side='bottom') tkinter.mainloop()", "top.geometry('200x150') top.wm_attributes('-topmost', True) panel = tkinter.Frame(top) button1 = tkinter.Button(panel, text='下载',", "tkinter.Button(panel, text='下载', command=download) button1.pack(side='left') button2 = tkinter.Button(panel, text='关于', command=show_about) button2.pack(side='right')", "= tkinter.Frame(top) button1 = tkinter.Button(panel, text='下载', command=download) button1.pack(side='left') button2 =", "import tkinter import tkinter.messagebox def download(): # 模拟下载任务需要花费10秒时间 time.sleep(10) tkinter.messagebox.showinfo('提示',", "import tkinter.messagebox def download(): # 模拟下载任务需要花费10秒时间 time.sleep(10) tkinter.messagebox.showinfo('提示', '下载完成') def", "def main(): top = tkinter.Tk() top.title('单线程') top.geometry('200x150') top.wm_attributes('-topmost', True) panel", "import time import tkinter import tkinter.messagebox def download(): # 模拟下载任务需要花费10秒时间", "-*- coding: utf-8 -*- \"\"\" 将耗时间的任务放到线程中以获得更好的用户体验。 \"\"\" import time import", "将耗时间的任务放到线程中以获得更好的用户体验。 \"\"\" import time import tkinter import tkinter.messagebox def download():", "tkinter import tkinter.messagebox def download(): # 模拟下载任务需要花费10秒时间 time.sleep(10) tkinter.messagebox.showinfo('提示', '下载完成')", "top.wm_attributes('-topmost', True) panel = tkinter.Frame(top) button1 = tkinter.Button(panel, text='下载', command=download)", "show_about(): tkinter.messagebox.showinfo('关于', '作者:罗浩') def main(): top = tkinter.Tk() top.title('单线程') top.geometry('200x150')", "模拟下载任务需要花费10秒时间 
time.sleep(10) tkinter.messagebox.showinfo('提示', '下载完成') def show_about(): tkinter.messagebox.showinfo('关于', '作者:罗浩') def main():", "panel = tkinter.Frame(top) button1 = tkinter.Button(panel, text='下载', command=download) button1.pack(side='left') button2", "def show_about(): tkinter.messagebox.showinfo('关于', '作者:罗浩') def main(): top = tkinter.Tk() top.title('单线程')", "= tkinter.Tk() top.title('单线程') top.geometry('200x150') top.wm_attributes('-topmost', True) panel = tkinter.Frame(top) button1", "text='关于', command=show_about) button2.pack(side='right') panel.pack(side='bottom') tkinter.mainloop() if __name__ == '__main__': main()", "utf-8 -*- \"\"\" 将耗时间的任务放到线程中以获得更好的用户体验。 \"\"\" import time import tkinter import", "tkinter.messagebox.showinfo('关于', '作者:罗浩') def main(): top = tkinter.Tk() top.title('单线程') top.geometry('200x150') top.wm_attributes('-topmost',", "\"\"\" 将耗时间的任务放到线程中以获得更好的用户体验。 \"\"\" import time import tkinter import tkinter.messagebox def", "top.title('单线程') top.geometry('200x150') top.wm_attributes('-topmost', True) panel = tkinter.Frame(top) button1 = tkinter.Button(panel,", "top = tkinter.Tk() top.title('单线程') top.geometry('200x150') top.wm_attributes('-topmost', True) panel = tkinter.Frame(top)", "download(): # 模拟下载任务需要花费10秒时间 time.sleep(10) tkinter.messagebox.showinfo('提示', '下载完成') def show_about(): tkinter.messagebox.showinfo('关于', '作者:罗浩')", "tkinter.messagebox.showinfo('提示', '下载完成') def show_about(): tkinter.messagebox.showinfo('关于', '作者:罗浩') def main(): top =", "True) panel = tkinter.Frame(top) button1 = tkinter.Button(panel, text='下载', command=download) button1.pack(side='left')", "time import tkinter import tkinter.messagebox def download(): # 模拟下载任务需要花费10秒时间 time.sleep(10)", "-*- \"\"\" 将耗时间的任务放到线程中以获得更好的用户体验。 \"\"\" import time import tkinter import tkinter.messagebox", "# 模拟下载任务需要花费10秒时间 time.sleep(10) tkinter.messagebox.showinfo('提示', '下载完成') def show_about(): tkinter.messagebox.showinfo('关于', '作者:罗浩') def", "time.sleep(10) 
tkinter.messagebox.showinfo('提示', '下载完成') def show_about(): tkinter.messagebox.showinfo('关于', '作者:罗浩') def main(): top", "def download(): # 模拟下载任务需要花费10秒时间 time.sleep(10) tkinter.messagebox.showinfo('提示', '下载完成') def show_about(): tkinter.messagebox.showinfo('关于',", "coding: utf-8 -*- \"\"\" 将耗时间的任务放到线程中以获得更好的用户体验。 \"\"\" import time import tkinter", "button2 = tkinter.Button(panel, text='关于', command=show_about) button2.pack(side='right') panel.pack(side='bottom') tkinter.mainloop() if __name__", "# -*- coding: utf-8 -*- \"\"\" 将耗时间的任务放到线程中以获得更好的用户体验。 \"\"\" import time", "tkinter.Button(panel, text='关于', command=show_about) button2.pack(side='right') panel.pack(side='bottom') tkinter.mainloop() if __name__ == '__main__':", "<gh_stars>0 # -*- coding: utf-8 -*- \"\"\" 将耗时间的任务放到线程中以获得更好的用户体验。 \"\"\" import", "tkinter.messagebox def download(): # 模拟下载任务需要花费10秒时间 time.sleep(10) tkinter.messagebox.showinfo('提示', '下载完成') def show_about():", "main(): top = tkinter.Tk() top.title('单线程') top.geometry('200x150') top.wm_attributes('-topmost', True) panel =", "text='下载', command=download) button1.pack(side='left') button2 = tkinter.Button(panel, text='关于', command=show_about) button2.pack(side='right') panel.pack(side='bottom')", "button1.pack(side='left') button2 = tkinter.Button(panel, text='关于', command=show_about) button2.pack(side='right') panel.pack(side='bottom') tkinter.mainloop() if", "tkinter.Frame(top) button1 = tkinter.Button(panel, text='下载', command=download) button1.pack(side='left') button2 = tkinter.Button(panel,", "= tkinter.Button(panel, text='下载', command=download) button1.pack(side='left') button2 = tkinter.Button(panel, text='关于', command=show_about)", "'作者:罗浩') def main(): top = tkinter.Tk() top.title('单线程') top.geometry('200x150') top.wm_attributes('-topmost', True)", "= tkinter.Button(panel, text='关于', command=show_about) button2.pack(side='right') panel.pack(side='bottom') tkinter.mainloop() if __name__ ==", "'下载完成') def show_about(): 
tkinter.messagebox.showinfo('关于', '作者:罗浩') def main(): top = tkinter.Tk()" ]
[ "given converstation\"\"\" path = path.split(\"/\") conv_or_user_id = path[1] if conv_or_user_id", "unknown gitlab webhook object kind\".format(payload[\"object_kind\"])) logger.warning(\"%s: unknown gitlab webhook object", "json.dumps(payload)) refs = payload.get(\"ref\", '').split(\"/\") user = payload.get(\"user_name\") if not", "install python_dateutil\") raise class webhookReceiver(AsyncRequestHandler): \"\"\"Receive REST API posts from", "is None: logger.error(\"conversation or user id must be provided as", "None: logger.error(\"conversation or user id must be provided as part", "logging from sinks.base_bot_request_handler import AsyncRequestHandler logger = logging.getLogger(__name__) try: import", "not user: user = payload[\"user\"][\"name\"] message = [\"GitLab update for", "message.append(\"Pushed {} commit(s) on {} branch:\".format( payload[\"total_commits_count\"], \"/\".join(refs[2:]))) for commit", "payload[\"commits\"]: message.append(\"{} -- {} at [{:%c}]({})\".format( commit[\"message\"], commit[\"author\"][\"name\"], dateutil.parser.parse(commit[\"timestamp\"]), commit[\"url\"]))", "request = payload[\"object_attributes\"] message.append(\"Merge request {}: from [{}:{}]({}) to [{}:{}]({})\".format(", "try: payload = json.loads(content) except json.JSONDecodeError as err: logger.exception(\"invalid payload", "module python_dateutil: pip3 install python_dateutil\") raise class webhookReceiver(AsyncRequestHandler): \"\"\"Receive REST", "{}: [{}]({})\".format( note[\"notable_type\"], note[\"id\"], note[\"note\"], note[\"url\"])) elif payload[\"object_kind\"] == \"merge_request\":", "message.append(\"Merge request {}: from [{}:{}]({}) to [{}:{}]({})\".format( request[\"id\"], request[\"source\"][\"name\"], request[\"source_branch\"],", "from [{}:{}]({}) to [{}:{}]({})\".format( request[\"id\"], request[\"source\"][\"name\"], request[\"source_branch\"], request[\"source\"][\"web_url\"], request[\"target\"][\"name\"], request[\"target_branch\"],", "at 
{:%c}\\n[{}]({})\".format( issue[\"state\"], issue[\"id\"], dateutil.parser.parse(issue[\"updated_at\"]), issue[\"title\"], issue[\"url\"])) elif payload[\"object_kind\"] ==", "= payload[\"object_attributes\"] message.append(\"{} note on {}: [{}]({})\".format( note[\"notable_type\"], note[\"id\"], note[\"note\"],", "import dateutil.parser except ImportError: logger.error(\"missing module python_dateutil: pip3 install python_dateutil\")", "{}: from [{}:{}]({}) to [{}:{}]({})\".format( request[\"id\"], request[\"source\"][\"name\"], request[\"source_branch\"], request[\"source\"][\"web_url\"], request[\"target\"][\"name\"],", "= path.split(\"/\") conv_or_user_id = path[1] if conv_or_user_id is None: logger.error(\"conversation", "request[\"id\"], request[\"source\"][\"name\"], request[\"source_branch\"], request[\"source\"][\"web_url\"], request[\"target\"][\"name\"], request[\"target_branch\"], request[\"target\"][\"web_url\"])) else: message.append(\"{}: unknown", "{} at {:%c}\\n[{}]({})\".format( issue[\"state\"], issue[\"id\"], dateutil.parser.parse(issue[\"updated_at\"]), issue[\"title\"], issue[\"url\"])) elif payload[\"object_kind\"]", "issue {} at {:%c}\\n[{}]({})\".format( issue[\"state\"], issue[\"id\"], dateutil.parser.parse(issue[\"updated_at\"]), issue[\"title\"], issue[\"url\"])) elif", "\"merge_request\": request = payload[\"object_attributes\"] message.append(\"Merge request {}: from [{}:{}]({}) to", "content): \"\"\"Process a received POST to a given converstation\"\"\" path", "= json.loads(content) except json.JSONDecodeError as err: logger.exception(\"invalid payload @%d:%d: %s\",", "%s\", err.lineno, err.colno, err) logger.error(\"GitLab message: %s\", json.dumps(payload)) refs =", "request[\"source\"][\"web_url\"], request[\"target\"][\"name\"], request[\"target_branch\"], request[\"target\"][\"web_url\"])) else: message.append(\"{}: unknown gitlab webhook object", "\"/\".join(refs[2:]))) for commit in payload[\"commits\"]: message.append(\"{} -- 
{} at [{:%c}]({})\".format(", "note[\"note\"], note[\"url\"])) elif payload[\"object_kind\"] == \"merge_request\": request = payload[\"object_attributes\"] message.append(\"Merge", "request[\"target\"][\"name\"], request[\"target_branch\"], request[\"target\"][\"web_url\"])) else: message.append(\"{}: unknown gitlab webhook object kind\".format(payload[\"object_kind\"]))", "commit[\"author\"][\"name\"], dateutil.parser.parse(commit[\"timestamp\"]), commit[\"url\"])) elif payload[\"object_kind\"] == \"tag_push\": message.append(\"Pushed tag {}]\".format(\"/\".join(refs[2:])))", "payload[\"object_attributes\"] message.append(\"Update {} issue {} at {:%c}\\n[{}]({})\".format( issue[\"state\"], issue[\"id\"], dateutil.parser.parse(issue[\"updated_at\"]),", "GitLab\"\"\" _bot = None @asyncio.coroutine def process_request(self, path, dummy_query_string, content):", "user id must be provided as part of path\") return", "err) logger.error(\"GitLab message: %s\", json.dumps(payload)) refs = payload.get(\"ref\", '').split(\"/\") user", "payload[\"total_commits_count\"], \"/\".join(refs[2:]))) for commit in payload[\"commits\"]: message.append(\"{} -- {} at", "note = payload[\"object_attributes\"] message.append(\"{} note on {}: [{}]({})\".format( note[\"notable_type\"], note[\"id\"],", "{} at [{:%c}]({})\".format( commit[\"message\"], commit[\"author\"][\"name\"], dateutil.parser.parse(commit[\"timestamp\"]), commit[\"url\"])) elif payload[\"object_kind\"] ==", "[{}:{}]({}) to [{}:{}]({})\".format( request[\"id\"], request[\"source\"][\"name\"], request[\"source_branch\"], request[\"source\"][\"web_url\"], request[\"target\"][\"name\"], request[\"target_branch\"], request[\"target\"][\"web_url\"]))", "see http://doc.gitlab.com/ee/web_hooks/web_hooks.html \"\"\" import asyncio import json import logging from", "AsyncRequestHandler logger = logging.getLogger(__name__) try: import dateutil.parser except ImportError: logger.error(\"missing", "\"tag_push\": 
message.append(\"Pushed tag {}]\".format(\"/\".join(refs[2:]))) elif payload[\"object_kind\"] == \"issue\": issue =", "request[\"target\"][\"web_url\"])) else: message.append(\"{}: unknown gitlab webhook object kind\".format(payload[\"object_kind\"])) logger.warning(\"%s: unknown", "logger.error(\"conversation or user id must be provided as part of", "of path\") return try: payload = json.loads(content) except json.JSONDecodeError as", "as err: logger.exception(\"invalid payload @%d:%d: %s\", err.lineno, err.colno, err) logger.error(\"GitLab", "_bot = None @asyncio.coroutine def process_request(self, path, dummy_query_string, content): \"\"\"Process", "conv_or_user_id is None: logger.error(\"conversation or user id must be provided", "err.lineno, err.colno, err) logger.error(\"GitLab message: %s\", json.dumps(payload)) refs = payload.get(\"ref\",", "except json.JSONDecodeError as err: logger.exception(\"invalid payload @%d:%d: %s\", err.lineno, err.colno,", "http://doc.gitlab.com/ee/web_hooks/web_hooks.html \"\"\" import asyncio import json import logging from sinks.base_bot_request_handler", "payload[\"object_attributes\"] message.append(\"Merge request {}: from [{}:{}]({}) to [{}:{}]({})\".format( request[\"id\"], request[\"source\"][\"name\"],", "must be provided as part of path\") return try: payload", "python_dateutil\") raise class webhookReceiver(AsyncRequestHandler): \"\"\"Receive REST API posts from GitLab\"\"\"", "payload[\"object_kind\"] == \"push\": message.append(\"Pushed {} commit(s) on {} branch:\".format( payload[\"total_commits_count\"],", "[{}:{}]({})\".format( request[\"id\"], request[\"source\"][\"name\"], request[\"source_branch\"], request[\"source\"][\"web_url\"], request[\"target\"][\"name\"], request[\"target_branch\"], request[\"target\"][\"web_url\"])) else: message.append(\"{}:", "payload[\"object_kind\"] == \"note\": note = payload[\"object_attributes\"] message.append(\"{} note on {}:", "webhook object kind\", payload[\"object_kind\"]) 
if message: yield from self.send_data(conv_or_user_id, \"\\n\".join(message))", "issue[\"title\"], issue[\"url\"])) elif payload[\"object_kind\"] == \"note\": note = payload[\"object_attributes\"] message.append(\"{}", "note[\"id\"], note[\"note\"], note[\"url\"])) elif payload[\"object_kind\"] == \"merge_request\": request = payload[\"object_attributes\"]", "json.loads(content) except json.JSONDecodeError as err: logger.exception(\"invalid payload @%d:%d: %s\", err.lineno,", "path\") return try: payload = json.loads(content) except json.JSONDecodeError as err:", "request[\"source\"][\"name\"], request[\"source_branch\"], request[\"source\"][\"web_url\"], request[\"target\"][\"name\"], request[\"target_branch\"], request[\"target\"][\"web_url\"])) else: message.append(\"{}: unknown gitlab", "= [\"GitLab update for [{}]({}) by __{}__\".format( payload[\"project\"][\"name\"], payload[\"project\"][\"web_url\"], user)]", "issue[\"state\"], issue[\"id\"], dateutil.parser.parse(issue[\"updated_at\"]), issue[\"title\"], issue[\"url\"])) elif payload[\"object_kind\"] == \"note\": note", "logger.exception(\"invalid payload @%d:%d: %s\", err.lineno, err.colno, err) logger.error(\"GitLab message: %s\",", "as part of path\") return try: payload = json.loads(content) except", "posts from GitLab\"\"\" _bot = None @asyncio.coroutine def process_request(self, path,", "webhook receiver - see http://doc.gitlab.com/ee/web_hooks/web_hooks.html \"\"\" import asyncio import json", "asyncio import json import logging from sinks.base_bot_request_handler import AsyncRequestHandler logger", "unknown gitlab webhook object kind\", payload[\"object_kind\"]) if message: yield from", "\"\"\"Process a received POST to a given converstation\"\"\" path =", "or user id must be provided as part of path\")", "= payload[\"object_attributes\"] message.append(\"Update {} issue {} at {:%c}\\n[{}]({})\".format( issue[\"state\"], issue[\"id\"],", "import asyncio import json import logging from 
sinks.base_bot_request_handler import AsyncRequestHandler", "@asyncio.coroutine def process_request(self, path, dummy_query_string, content): \"\"\"Process a received POST", "API posts from GitLab\"\"\" _bot = None @asyncio.coroutine def process_request(self,", "== \"push\": message.append(\"Pushed {} commit(s) on {} branch:\".format( payload[\"total_commits_count\"], \"/\".join(refs[2:])))", "== \"merge_request\": request = payload[\"object_attributes\"] message.append(\"Merge request {}: from [{}:{}]({})", "raise class webhookReceiver(AsyncRequestHandler): \"\"\"Receive REST API posts from GitLab\"\"\" _bot", "else: message.append(\"{}: unknown gitlab webhook object kind\".format(payload[\"object_kind\"])) logger.warning(\"%s: unknown gitlab", "= payload.get(\"ref\", '').split(\"/\") user = payload.get(\"user_name\") if not user: user", "import AsyncRequestHandler logger = logging.getLogger(__name__) try: import dateutil.parser except ImportError:", "part of path\") return try: payload = json.loads(content) except json.JSONDecodeError", "payload @%d:%d: %s\", err.lineno, err.colno, err) logger.error(\"GitLab message: %s\", json.dumps(payload))", "dateutil.parser.parse(issue[\"updated_at\"]), issue[\"title\"], issue[\"url\"])) elif payload[\"object_kind\"] == \"note\": note = payload[\"object_attributes\"]", "user = payload.get(\"user_name\") if not user: user = payload[\"user\"][\"name\"] message", "= payload[\"user\"][\"name\"] message = [\"GitLab update for [{}]({}) by __{}__\".format(", "[{}]({}) by __{}__\".format( payload[\"project\"][\"name\"], payload[\"project\"][\"web_url\"], user)] if payload[\"object_kind\"] == \"push\":", "{} issue {} at {:%c}\\n[{}]({})\".format( issue[\"state\"], issue[\"id\"], dateutil.parser.parse(issue[\"updated_at\"]), issue[\"title\"], issue[\"url\"]))", "= payload[\"object_attributes\"] message.append(\"Merge request {}: from [{}:{}]({}) to [{}:{}]({})\".format( request[\"id\"],", "== \"note\": note = 
payload[\"object_attributes\"] message.append(\"{} note on {}: [{}]({})\".format(", "from GitLab\"\"\" _bot = None @asyncio.coroutine def process_request(self, path, dummy_query_string,", "a received POST to a given converstation\"\"\" path = path.split(\"/\")", "request[\"target_branch\"], request[\"target\"][\"web_url\"])) else: message.append(\"{}: unknown gitlab webhook object kind\".format(payload[\"object_kind\"])) logger.warning(\"%s:", "gitlab webhook object kind\", payload[\"object_kind\"]) if message: yield from self.send_data(conv_or_user_id,", "issue[\"id\"], dateutil.parser.parse(issue[\"updated_at\"]), issue[\"title\"], issue[\"url\"])) elif payload[\"object_kind\"] == \"note\": note =", "path = path.split(\"/\") conv_or_user_id = path[1] if conv_or_user_id is None:", "note[\"url\"])) elif payload[\"object_kind\"] == \"merge_request\": request = payload[\"object_attributes\"] message.append(\"Merge request", "at [{:%c}]({})\".format( commit[\"message\"], commit[\"author\"][\"name\"], dateutil.parser.parse(commit[\"timestamp\"]), commit[\"url\"])) elif payload[\"object_kind\"] == \"tag_push\":", "provided as part of path\") return try: payload = json.loads(content)", "dateutil.parser.parse(commit[\"timestamp\"]), commit[\"url\"])) elif payload[\"object_kind\"] == \"tag_push\": message.append(\"Pushed tag {}]\".format(\"/\".join(refs[2:]))) elif", "\"\"\"Receive REST API posts from GitLab\"\"\" _bot = None @asyncio.coroutine", "webhookReceiver(AsyncRequestHandler): \"\"\"Receive REST API posts from GitLab\"\"\" _bot = None", "be provided as part of path\") return try: payload =", "request {}: from [{}:{}]({}) to [{}:{}]({})\".format( request[\"id\"], request[\"source\"][\"name\"], request[\"source_branch\"], request[\"source\"][\"web_url\"],", "= logging.getLogger(__name__) try: import dateutil.parser except ImportError: logger.error(\"missing module python_dateutil:", "commit in payload[\"commits\"]: message.append(\"{} -- {} at [{:%c}]({})\".format( 
commit[\"message\"], commit[\"author\"][\"name\"],", "payload[\"project\"][\"name\"], payload[\"project\"][\"web_url\"], user)] if payload[\"object_kind\"] == \"push\": message.append(\"Pushed {} commit(s)", "= path[1] if conv_or_user_id is None: logger.error(\"conversation or user id", "payload[\"project\"][\"web_url\"], user)] if payload[\"object_kind\"] == \"push\": message.append(\"Pushed {} commit(s) on", "if not user: user = payload[\"user\"][\"name\"] message = [\"GitLab update", "for [{}]({}) by __{}__\".format( payload[\"project\"][\"name\"], payload[\"project\"][\"web_url\"], user)] if payload[\"object_kind\"] ==", "commit(s) on {} branch:\".format( payload[\"total_commits_count\"], \"/\".join(refs[2:]))) for commit in payload[\"commits\"]:", "update for [{}]({}) by __{}__\".format( payload[\"project\"][\"name\"], payload[\"project\"][\"web_url\"], user)] if payload[\"object_kind\"]", "payload[\"object_kind\"] == \"tag_push\": message.append(\"Pushed tag {}]\".format(\"/\".join(refs[2:]))) elif payload[\"object_kind\"] == \"issue\":", "GitLab webhook receiver - see http://doc.gitlab.com/ee/web_hooks/web_hooks.html \"\"\" import asyncio import", "message.append(\"{}: unknown gitlab webhook object kind\".format(payload[\"object_kind\"])) logger.warning(\"%s: unknown gitlab webhook", "logger.warning(\"%s: unknown gitlab webhook object kind\", payload[\"object_kind\"]) if message: yield", "import logging from sinks.base_bot_request_handler import AsyncRequestHandler logger = logging.getLogger(__name__) try:", "for commit in payload[\"commits\"]: message.append(\"{} -- {} at [{:%c}]({})\".format( commit[\"message\"],", "commit[\"message\"], commit[\"author\"][\"name\"], dateutil.parser.parse(commit[\"timestamp\"]), commit[\"url\"])) elif payload[\"object_kind\"] == \"tag_push\": message.append(\"Pushed tag", "json.JSONDecodeError as err: logger.exception(\"invalid payload @%d:%d: %s\", err.lineno, err.colno, err)", "payload[\"object_kind\"] == 
\"merge_request\": request = payload[\"object_attributes\"] message.append(\"Merge request {}: from", "user)] if payload[\"object_kind\"] == \"push\": message.append(\"Pushed {} commit(s) on {}", "logger.error(\"missing module python_dateutil: pip3 install python_dateutil\") raise class webhookReceiver(AsyncRequestHandler): \"\"\"Receive", "{:%c}\\n[{}]({})\".format( issue[\"state\"], issue[\"id\"], dateutil.parser.parse(issue[\"updated_at\"]), issue[\"title\"], issue[\"url\"])) elif payload[\"object_kind\"] == \"note\":", "elif payload[\"object_kind\"] == \"issue\": issue = payload[\"object_attributes\"] message.append(\"Update {} issue", "class webhookReceiver(AsyncRequestHandler): \"\"\"Receive REST API posts from GitLab\"\"\" _bot =", "conv_or_user_id = path[1] if conv_or_user_id is None: logger.error(\"conversation or user", "to a given converstation\"\"\" path = path.split(\"/\") conv_or_user_id = path[1]", "branch:\".format( payload[\"total_commits_count\"], \"/\".join(refs[2:]))) for commit in payload[\"commits\"]: message.append(\"{} -- {}", "issue = payload[\"object_attributes\"] message.append(\"Update {} issue {} at {:%c}\\n[{}]({})\".format( issue[\"state\"],", "on {} branch:\".format( payload[\"total_commits_count\"], \"/\".join(refs[2:]))) for commit in payload[\"commits\"]: message.append(\"{}", "try: import dateutil.parser except ImportError: logger.error(\"missing module python_dateutil: pip3 install", "def process_request(self, path, dummy_query_string, content): \"\"\"Process a received POST to", "= None @asyncio.coroutine def process_request(self, path, dummy_query_string, content): \"\"\"Process a", "return try: payload = json.loads(content) except json.JSONDecodeError as err: logger.exception(\"invalid", "logger.error(\"GitLab message: %s\", json.dumps(payload)) refs = payload.get(\"ref\", '').split(\"/\") user =", "{} commit(s) on {} branch:\".format( payload[\"total_commits_count\"], \"/\".join(refs[2:]))) for commit in", "elif 
payload[\"object_kind\"] == \"tag_push\": message.append(\"Pushed tag {}]\".format(\"/\".join(refs[2:]))) elif payload[\"object_kind\"] ==", "payload[\"object_attributes\"] message.append(\"{} note on {}: [{}]({})\".format( note[\"notable_type\"], note[\"id\"], note[\"note\"], note[\"url\"]))", "gitlab webhook object kind\".format(payload[\"object_kind\"])) logger.warning(\"%s: unknown gitlab webhook object kind\",", "object kind\".format(payload[\"object_kind\"])) logger.warning(\"%s: unknown gitlab webhook object kind\", payload[\"object_kind\"]) if", "user = payload[\"user\"][\"name\"] message = [\"GitLab update for [{}]({}) by", "tag {}]\".format(\"/\".join(refs[2:]))) elif payload[\"object_kind\"] == \"issue\": issue = payload[\"object_attributes\"] message.append(\"Update", "python_dateutil: pip3 install python_dateutil\") raise class webhookReceiver(AsyncRequestHandler): \"\"\"Receive REST API", "kind\".format(payload[\"object_kind\"])) logger.warning(\"%s: unknown gitlab webhook object kind\", payload[\"object_kind\"]) if message:", "[{:%c}]({})\".format( commit[\"message\"], commit[\"author\"][\"name\"], dateutil.parser.parse(commit[\"timestamp\"]), commit[\"url\"])) elif payload[\"object_kind\"] == \"tag_push\": message.append(\"Pushed", "on {}: [{}]({})\".format( note[\"notable_type\"], note[\"id\"], note[\"note\"], note[\"url\"])) elif payload[\"object_kind\"] ==", "except ImportError: logger.error(\"missing module python_dateutil: pip3 install python_dateutil\") raise class", "message.append(\"{} -- {} at [{:%c}]({})\".format( commit[\"message\"], commit[\"author\"][\"name\"], dateutil.parser.parse(commit[\"timestamp\"]), commit[\"url\"])) elif", "elif payload[\"object_kind\"] == \"note\": note = payload[\"object_attributes\"] message.append(\"{} note on", "{}]\".format(\"/\".join(refs[2:]))) elif payload[\"object_kind\"] == \"issue\": issue = payload[\"object_attributes\"] message.append(\"Update {}", "[\"GitLab update for [{}]({}) by __{}__\".format( 
payload[\"project\"][\"name\"], payload[\"project\"][\"web_url\"], user)] if", "note[\"notable_type\"], note[\"id\"], note[\"note\"], note[\"url\"])) elif payload[\"object_kind\"] == \"merge_request\": request =", "POST to a given converstation\"\"\" path = path.split(\"/\") conv_or_user_id =", "[{}]({})\".format( note[\"notable_type\"], note[\"id\"], note[\"note\"], note[\"url\"])) elif payload[\"object_kind\"] == \"merge_request\": request", "- see http://doc.gitlab.com/ee/web_hooks/web_hooks.html \"\"\" import asyncio import json import logging", "note on {}: [{}]({})\".format( note[\"notable_type\"], note[\"id\"], note[\"note\"], note[\"url\"])) elif payload[\"object_kind\"]", "message.append(\"{} note on {}: [{}]({})\".format( note[\"notable_type\"], note[\"id\"], note[\"note\"], note[\"url\"])) elif", "pip3 install python_dateutil\") raise class webhookReceiver(AsyncRequestHandler): \"\"\"Receive REST API posts", "dateutil.parser except ImportError: logger.error(\"missing module python_dateutil: pip3 install python_dateutil\") raise", "received POST to a given converstation\"\"\" path = path.split(\"/\") conv_or_user_id", "err.colno, err) logger.error(\"GitLab message: %s\", json.dumps(payload)) refs = payload.get(\"ref\", '').split(\"/\")", "= payload.get(\"user_name\") if not user: user = payload[\"user\"][\"name\"] message =", "\"note\": note = payload[\"object_attributes\"] message.append(\"{} note on {}: [{}]({})\".format( note[\"notable_type\"],", "path.split(\"/\") conv_or_user_id = path[1] if conv_or_user_id is None: logger.error(\"conversation or", "payload[\"object_kind\"] == \"issue\": issue = payload[\"object_attributes\"] message.append(\"Update {} issue {}", "== \"tag_push\": message.append(\"Pushed tag {}]\".format(\"/\".join(refs[2:]))) elif payload[\"object_kind\"] == \"issue\": issue", "payload.get(\"user_name\") if not user: user = payload[\"user\"][\"name\"] message = [\"GitLab", "request[\"source_branch\"], 
request[\"source\"][\"web_url\"], request[\"target\"][\"name\"], request[\"target_branch\"], request[\"target\"][\"web_url\"])) else: message.append(\"{}: unknown gitlab webhook", "to [{}:{}]({})\".format( request[\"id\"], request[\"source\"][\"name\"], request[\"source_branch\"], request[\"source\"][\"web_url\"], request[\"target\"][\"name\"], request[\"target_branch\"], request[\"target\"][\"web_url\"])) else:", "None @asyncio.coroutine def process_request(self, path, dummy_query_string, content): \"\"\"Process a received", "issue[\"url\"])) elif payload[\"object_kind\"] == \"note\": note = payload[\"object_attributes\"] message.append(\"{} note", "process_request(self, path, dummy_query_string, content): \"\"\"Process a received POST to a", "REST API posts from GitLab\"\"\" _bot = None @asyncio.coroutine def", "'').split(\"/\") user = payload.get(\"user_name\") if not user: user = payload[\"user\"][\"name\"]", "if conv_or_user_id is None: logger.error(\"conversation or user id must be", "ImportError: logger.error(\"missing module python_dateutil: pip3 install python_dateutil\") raise class webhookReceiver(AsyncRequestHandler):", "if payload[\"object_kind\"] == \"push\": message.append(\"Pushed {} commit(s) on {} branch:\".format(", "message: %s\", json.dumps(payload)) refs = payload.get(\"ref\", '').split(\"/\") user = payload.get(\"user_name\")", "__{}__\".format( payload[\"project\"][\"name\"], payload[\"project\"][\"web_url\"], user)] if payload[\"object_kind\"] == \"push\": message.append(\"Pushed {}", "message.append(\"Pushed tag {}]\".format(\"/\".join(refs[2:]))) elif payload[\"object_kind\"] == \"issue\": issue = payload[\"object_attributes\"]", "import json import logging from sinks.base_bot_request_handler import AsyncRequestHandler logger =", "== \"issue\": issue = payload[\"object_attributes\"] message.append(\"Update {} issue {} at", "\"issue\": issue = payload[\"object_attributes\"] message.append(\"Update {} issue {} at 
{:%c}\\n[{}]({})\".format(", "message = [\"GitLab update for [{}]({}) by __{}__\".format( payload[\"project\"][\"name\"], payload[\"project\"][\"web_url\"],", "%s\", json.dumps(payload)) refs = payload.get(\"ref\", '').split(\"/\") user = payload.get(\"user_name\") if", "elif payload[\"object_kind\"] == \"merge_request\": request = payload[\"object_attributes\"] message.append(\"Merge request {}:", "json import logging from sinks.base_bot_request_handler import AsyncRequestHandler logger = logging.getLogger(__name__)", "@%d:%d: %s\", err.lineno, err.colno, err) logger.error(\"GitLab message: %s\", json.dumps(payload)) refs", "payload[\"user\"][\"name\"] message = [\"GitLab update for [{}]({}) by __{}__\".format( payload[\"project\"][\"name\"],", "\"push\": message.append(\"Pushed {} commit(s) on {} branch:\".format( payload[\"total_commits_count\"], \"/\".join(refs[2:]))) for", "id must be provided as part of path\") return try:", "\"\"\" GitLab webhook receiver - see http://doc.gitlab.com/ee/web_hooks/web_hooks.html \"\"\" import asyncio", "payload.get(\"ref\", '').split(\"/\") user = payload.get(\"user_name\") if not user: user =", "payload = json.loads(content) except json.JSONDecodeError as err: logger.exception(\"invalid payload @%d:%d:", "-- {} at [{:%c}]({})\".format( commit[\"message\"], commit[\"author\"][\"name\"], dateutil.parser.parse(commit[\"timestamp\"]), commit[\"url\"])) elif payload[\"object_kind\"]", "message.append(\"Update {} issue {} at {:%c}\\n[{}]({})\".format( issue[\"state\"], issue[\"id\"], dateutil.parser.parse(issue[\"updated_at\"]), issue[\"title\"],", "refs = payload.get(\"ref\", '').split(\"/\") user = payload.get(\"user_name\") if not user:", "\"\"\" import asyncio import json import logging from sinks.base_bot_request_handler import", "{} branch:\".format( payload[\"total_commits_count\"], \"/\".join(refs[2:]))) for commit in payload[\"commits\"]: message.append(\"{} --", "converstation\"\"\" path = path.split(\"/\") 
conv_or_user_id = path[1] if conv_or_user_id is", "webhook object kind\".format(payload[\"object_kind\"])) logger.warning(\"%s: unknown gitlab webhook object kind\", payload[\"object_kind\"])", "in payload[\"commits\"]: message.append(\"{} -- {} at [{:%c}]({})\".format( commit[\"message\"], commit[\"author\"][\"name\"], dateutil.parser.parse(commit[\"timestamp\"]),", "logger = logging.getLogger(__name__) try: import dateutil.parser except ImportError: logger.error(\"missing module", "sinks.base_bot_request_handler import AsyncRequestHandler logger = logging.getLogger(__name__) try: import dateutil.parser except", "path, dummy_query_string, content): \"\"\"Process a received POST to a given", "user: user = payload[\"user\"][\"name\"] message = [\"GitLab update for [{}]({})", "path[1] if conv_or_user_id is None: logger.error(\"conversation or user id must", "receiver - see http://doc.gitlab.com/ee/web_hooks/web_hooks.html \"\"\" import asyncio import json import", "by __{}__\".format( payload[\"project\"][\"name\"], payload[\"project\"][\"web_url\"], user)] if payload[\"object_kind\"] == \"push\": message.append(\"Pushed", "commit[\"url\"])) elif payload[\"object_kind\"] == \"tag_push\": message.append(\"Pushed tag {}]\".format(\"/\".join(refs[2:]))) elif payload[\"object_kind\"]", "from sinks.base_bot_request_handler import AsyncRequestHandler logger = logging.getLogger(__name__) try: import dateutil.parser", "a given converstation\"\"\" path = path.split(\"/\") conv_or_user_id = path[1] if", "dummy_query_string, content): \"\"\"Process a received POST to a given converstation\"\"\"", "err: logger.exception(\"invalid payload @%d:%d: %s\", err.lineno, err.colno, err) logger.error(\"GitLab message:", "logging.getLogger(__name__) try: import dateutil.parser except ImportError: logger.error(\"missing module python_dateutil: pip3" ]
[ "os import subprocess as sp class Assessor(BehaviorModelExecutor): def __init__(self, instance_time,", "evsim.system_simulator import SystemSimulator from evsim.behavior_model_executor import BehaviorModelExecutor from evsim.system_message import", "\"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), \"Human Receiver Object: Move\") #print(temp) return", "self.insert_state(\"IDLE\", Infinite) self.insert_state(\"MOVE\", 1) self.insert_input_port(\"assess\") self.insert_output_port(\"done\") def ext_trans(self,port, msg): data", "(SystemSimulator().get_engine(self.engine_name).get_global_time(), str(data[0])) #print(temp) def output(self): #temp = \"[%f] %s\" %", "name, engine_name) # Open CSV self.init_state(\"IDLE\") self.insert_state(\"IDLE\", Infinite) self.insert_state(\"MOVE\", 1)", "# Open CSV self.init_state(\"IDLE\") self.insert_state(\"IDLE\", Infinite) self.insert_state(\"MOVE\", 1) self.insert_input_port(\"assess\") self.insert_output_port(\"done\")", "Assessor(BehaviorModelExecutor): def __init__(self, instance_time, destruct_time, name, engine_name): BehaviorModelExecutor.__init__(self, instance_time, destruct_time,", "import * import os import subprocess as sp class Assessor(BehaviorModelExecutor):", "sp class Assessor(BehaviorModelExecutor): def __init__(self, instance_time, destruct_time, name, engine_name): BehaviorModelExecutor.__init__(self,", "from evsim.system_message import SysMessage from evsim.definition import * import os", "%s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), \"Human Receiver Object: Move\") #print(temp) return None", "(SystemSimulator().get_engine(self.engine_name).get_global_time(), \"Human Receiver Object: Move\") #print(temp) return None def int_trans(self):", "#temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), str(data[0])) #print(temp) def output(self):", "% (SystemSimulator().get_engine(self.engine_name).get_global_time(), 
\"Human Receiver Object: Move\") #print(temp) return None def", "CSV self.init_state(\"IDLE\") self.insert_state(\"IDLE\", Infinite) self.insert_state(\"MOVE\", 1) self.insert_input_port(\"assess\") self.insert_output_port(\"done\") def ext_trans(self,port,", "<reponame>cbchoi/nppsim<gh_stars>1-10 from evsim.system_simulator import SystemSimulator from evsim.behavior_model_executor import BehaviorModelExecutor from", "import subprocess as sp class Assessor(BehaviorModelExecutor): def __init__(self, instance_time, destruct_time,", "%s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), str(data[0])) #print(temp) def output(self): #temp = \"[%f]", "self.init_state(\"IDLE\") self.insert_state(\"IDLE\", Infinite) self.insert_state(\"MOVE\", 1) self.insert_input_port(\"assess\") self.insert_output_port(\"done\") def ext_trans(self,port, msg):", "Object: Move\") #print(temp) return None def int_trans(self): self._cur_state = \"MOVE\"", "name, engine_name): BehaviorModelExecutor.__init__(self, instance_time, destruct_time, name, engine_name) # Open CSV", "as sp class Assessor(BehaviorModelExecutor): def __init__(self, instance_time, destruct_time, name, engine_name):", "+ \" \" + str(data[0])) #temp = \"[%f] %s\" %", "import SystemSimulator from evsim.behavior_model_executor import BehaviorModelExecutor from evsim.system_message import SysMessage", "self.insert_output_port(\"done\") def ext_trans(self,port, msg): data = msg.retrieve() #print(\"Assessor\") #print(str(datetime.datetime.now()) +", "evsim.definition import * import os import subprocess as sp class", "evsim.system_message import SysMessage from evsim.definition import * import os import", "= msg.retrieve() #print(\"Assessor\") #print(str(datetime.datetime.now()) + \" \" + str(data[0])) #temp", "BehaviorModelExecutor.__init__(self, instance_time, destruct_time, name, engine_name) # Open CSV self.init_state(\"IDLE\") self.insert_state(\"IDLE\",", "SysMessage from evsim.definition import * 
import os import subprocess as", "import SysMessage from evsim.definition import * import os import subprocess", "* import os import subprocess as sp class Assessor(BehaviorModelExecutor): def", "\" + str(data[0])) #temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), str(data[0]))", "msg.retrieve() #print(\"Assessor\") #print(str(datetime.datetime.now()) + \" \" + str(data[0])) #temp =", "str(data[0])) #print(temp) def output(self): #temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(),", "instance_time, destruct_time, name, engine_name): BehaviorModelExecutor.__init__(self, instance_time, destruct_time, name, engine_name) #", "def ext_trans(self,port, msg): data = msg.retrieve() #print(\"Assessor\") #print(str(datetime.datetime.now()) + \"", "class Assessor(BehaviorModelExecutor): def __init__(self, instance_time, destruct_time, name, engine_name): BehaviorModelExecutor.__init__(self, instance_time,", "output(self): #temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), \"Human Receiver Object:", "engine_name): BehaviorModelExecutor.__init__(self, instance_time, destruct_time, name, engine_name) # Open CSV self.init_state(\"IDLE\")", "instance_time, destruct_time, name, engine_name) # Open CSV self.init_state(\"IDLE\") self.insert_state(\"IDLE\", Infinite)", "\"Human Receiver Object: Move\") #print(temp) return None def int_trans(self): self._cur_state", "\"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), str(data[0])) #print(temp) def output(self): #temp =", "import os import subprocess as sp class Assessor(BehaviorModelExecutor): def __init__(self,", "engine_name) # Open CSV self.init_state(\"IDLE\") self.insert_state(\"IDLE\", Infinite) self.insert_state(\"MOVE\", 1) self.insert_input_port(\"assess\")", "def __init__(self, instance_time, destruct_time, name, engine_name): BehaviorModelExecutor.__init__(self, instance_time, 
destruct_time, name,", "from evsim.behavior_model_executor import BehaviorModelExecutor from evsim.system_message import SysMessage from evsim.definition", "Open CSV self.init_state(\"IDLE\") self.insert_state(\"IDLE\", Infinite) self.insert_state(\"MOVE\", 1) self.insert_input_port(\"assess\") self.insert_output_port(\"done\") def", "1) self.insert_input_port(\"assess\") self.insert_output_port(\"done\") def ext_trans(self,port, msg): data = msg.retrieve() #print(\"Assessor\")", "#print(\"Assessor\") #print(str(datetime.datetime.now()) + \" \" + str(data[0])) #temp = \"[%f]", "__init__(self, instance_time, destruct_time, name, engine_name): BehaviorModelExecutor.__init__(self, instance_time, destruct_time, name, engine_name)", "str(data[0])) #temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), str(data[0])) #print(temp) def", "subprocess as sp class Assessor(BehaviorModelExecutor): def __init__(self, instance_time, destruct_time, name,", "destruct_time, name, engine_name) # Open CSV self.init_state(\"IDLE\") self.insert_state(\"IDLE\", Infinite) self.insert_state(\"MOVE\",", "#temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), \"Human Receiver Object: Move\")", "import BehaviorModelExecutor from evsim.system_message import SysMessage from evsim.definition import *", "destruct_time, name, engine_name): BehaviorModelExecutor.__init__(self, instance_time, destruct_time, name, engine_name) # Open", "Infinite) self.insert_state(\"MOVE\", 1) self.insert_input_port(\"assess\") self.insert_output_port(\"done\") def ext_trans(self,port, msg): data =", "% (SystemSimulator().get_engine(self.engine_name).get_global_time(), str(data[0])) #print(temp) def output(self): #temp = \"[%f] %s\"", "SystemSimulator from evsim.behavior_model_executor import BehaviorModelExecutor from evsim.system_message import SysMessage from", "= \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), 
\"Human Receiver Object: Move\") #print(temp)", "+ str(data[0])) #temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), str(data[0])) #print(temp)", "data = msg.retrieve() #print(\"Assessor\") #print(str(datetime.datetime.now()) + \" \" + str(data[0]))", "ext_trans(self,port, msg): data = msg.retrieve() #print(\"Assessor\") #print(str(datetime.datetime.now()) + \" \"", "def output(self): #temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), \"Human Receiver", "\" \" + str(data[0])) #temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(),", "self.insert_input_port(\"assess\") self.insert_output_port(\"done\") def ext_trans(self,port, msg): data = msg.retrieve() #print(\"Assessor\") #print(str(datetime.datetime.now())", "from evsim.definition import * import os import subprocess as sp", "#print(str(datetime.datetime.now()) + \" \" + str(data[0])) #temp = \"[%f] %s\"", "evsim.behavior_model_executor import BehaviorModelExecutor from evsim.system_message import SysMessage from evsim.definition import", "= \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), str(data[0])) #print(temp) def output(self): #temp", "from evsim.system_simulator import SystemSimulator from evsim.behavior_model_executor import BehaviorModelExecutor from evsim.system_message", "#print(temp) def output(self): #temp = \"[%f] %s\" % (SystemSimulator().get_engine(self.engine_name).get_global_time(), \"Human", "BehaviorModelExecutor from evsim.system_message import SysMessage from evsim.definition import * import", "Receiver Object: Move\") #print(temp) return None def int_trans(self): self._cur_state =", "msg): data = msg.retrieve() #print(\"Assessor\") #print(str(datetime.datetime.now()) + \" \" +", "self.insert_state(\"MOVE\", 1) self.insert_input_port(\"assess\") self.insert_output_port(\"done\") def ext_trans(self,port, msg): data = msg.retrieve()" ]
[ "key in result: _new_dict[key] = filter_received_data(key, data_selection) result = _new_dict", "[\"preferredName\", \"str\", \"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"@id\":", "\"Person\", \"organisation\": \"CorporateBody\", \"place\": \"PlaceOrGeographicName\", }, ], \"name\": [\"preferredName\", \"str\",", "formatted with a gnd id number of self.gnd_id (list or", "self.show_printmessages else None self.apiindex = 0 if self.gnd_id is not", "definition. list mode = select specific aliases from base set)", "received or not remaining_apis_to_check: list of apiindex values, which have", "without connectivity check.\" ) if self.show_printmessages else None def connectivitycheck_single(self,", "index_to_test: int, gnd_id_to_test: str = \"118540238\") -> bool: \"\"\"auxiliary method", "_temp_categorial_values: if _temp_categorial_values[_type] in _temp_data: _temp_data = _type selected_categories_data[category] =", "-> Union[dict, None]: \"\"\"method to receive data from api with", "{} for category in selected_categories: _temp_data = [] try: _temp_data", "\"gnd_apilist.json\") try: self.apilist: Union[dict, None] = FileReader( filepath=self.apilist_filepath, origin=\"local\", internal_call=True,", "done without connectivity check.\" ) if self.show_printmessages else None def", "for i, _ in enumerate(self.apilist)] if self.check_connectivity == True: self.connectivitycheck_loop()", "connectivitycheck_loop()) show_printmessages: show class internal printmessages on runtime or not", "continuing attempt to receive gnd data from {self.apilist[self.apiindex]['name']} api...\" )", "if self.show_printmessages else None result[gnd] = _temp_data print( f\"GndConnector get_gnd_data()", "tei_entity_enricher.interface.postprocessing.io import FileReader, FileWriter from tei_entity_enricher.util.helper import local_save_path, makedir_if_necessary from", "api...\" ) if self.show_printmessages else None elif 
self.connection_established == False:", "str = os.path.join(local_save_path, \"config\", \"postprocessing\", \"gnd_apilist.json\") try: self.apilist: Union[dict, None]", "bool = show_printmessages self.gnd_id: Union[str, List[str], None] = gnd_id self.apiindex:", "[\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"variantNameEntityForThePerson\",", ") if self.show_printmessages else None return -1 def return_complete_url(self, index:", "be delivered by api\" ) if self.show_printmessages else None return", "\"nominal\"], \"pseudonyms\": [ \"variantNameEntityForThePerson\", [{\"forename\": [\"str\"], \"surname\": [\"str\"]}], \"nominal\", ],", "user-defined set of keys, for which the mapping is provided", "in get_gnd_data() method: could not load resource from api as", "type(self.gnd_id) == str: print( f\"GndConnector complete URL: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\" ) if", "(logic-wise: 'categorial' or 'nominal'), 4. 
a categorization dict, if the", "if delivered, a normalized output is generated by renaming keys", "returns 0 or -1 for unittest purposes\"\"\" if self.check_connectivity ==", "str: print( f\"GndConnector complete URL: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\" ) if self.show_printmessages else", "not None: if type(self.gnd_id) == str: _new_dict = {list(result.keys())[0]: filter_received_data(self.gnd_id,", "int = 0 try: makedir_if_necessary(os.path.dirname(self.apilist_filepath)) FileWriter(data=self.apilist, filepath=self.apilist_filepath).writefile_json() except: print( f\"GndConnector", "!= False: result[self.gnd_id] = _temp_data print( f\"GndConnector get_gnd_data() status: data", "= [ { \"name\": \"culturegraph\", \"baseUrl\": \"https://hub.culturegraph.org/entityfacts/{}\", \"baseAliases\": { \"type\":", "\"baseUrl\": \"https://hub.culturegraph.org/entityfacts/{}\", \"baseAliases\": { \"type\": [ \"@type\", \"str\", \"categorial\", {", "or -1 for unittest purposes\"\"\" if self.apiindex not in [i", "in enumerate(self.gnd_id): _temp_data = {} try: filereader = FileReader( filepath=self.return_complete_url(index),", "type str, example 2: using lobid api the value of", "gnd id number {index + 1} in passed gnd id", "else None return -1 def print_complete_url(self, index: int = 0)", "\"categorial\" and type(self.apilist[self.apiindex][\"baseAliases\"][category][3] == dict) ): _temp_category_data_form = self.apilist[self.apiindex][\"baseAliases\"][category][1] _temp_categorial_values", "apis have not been checked yet. 
to do so manually", "self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id) elif type(self.gnd_id) == list: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index]) else: print( \"GndConnector", "data types if ( len(_temp_data) > 0 and self.apilist[self.apiindex][\"baseAliases\"][category][2] ==", "do so manually execute connectivitycheck_loop() method of the current connector", "with default settings...\" ) if self.show_printmessages else None self.apilist: List[dict]", "else: print( \"GndConnector connectivity check error: none of the listed", "extracts the key-value pairs from the raw data received from", "type(mode) == str else 'baseAliases' # => allow parsing a", "self.show_printmessages == True else None self.check_connectivity: bool = check_connectivity self.connection_established:", "raw data received from api for one gnd id number", "return False if type(result) == dict: return True return False", "str: _temp_data = {} try: filereader = FileReader( filepath=self.return_complete_url(), origin=\"web\",", "data can be passed to an instance of Cache class", "the list) 1. the original key name, 2. the original", "has been established to any of the available apis. gnd", "the listed apis is responding as expected.\" ) if self.show_printmessages", "str) selected by index value\"\"\" if self.apiindex not in [i", "Goethe\"\"\" try: result: dict = FileReader( filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test), origin=\"web\", internal_call=True, show_printmessages=self.show_printmessages,", "the keys of 'baseAliases' dict define the new key names,", "find {category} information for {gnd_id} in raw data. continuing processing...\"", "get_gnd_data() status: for gnd id {index + 1} ({gnd}) of", "{gnd_id} in raw data. 
continuing processing...\" ) if self.show_printmessages else", "} # selected_categories_list = category_sets.get(mode)[0] if type(mode) == str else", "implemented: can be a \"person\", \"place\", \"organization\" or a custom", "logic-wise is 'categorial': it delivers mapping information to assign a", "self.show_printmessages else None return 0 else: print( \"GndConnector print_complete_url() internal", "queries can not be executed at the moment.\" ) if", "== str else 'baseAliases' # => allow parsing a list", "a value, which has itself a value 'Person' of type", "received.\" ) if self.show_printmessages else None else: print( f\"GndConnector get_gnd_data()", "data_selection is not None: if type(self.gnd_id) == str: _new_dict =", "to receive data from api with the possibility to filter", "aliases will be extracted) or a list of str (specific", "\"\"\"establishes connection to api, from which norm data for entities", "(i.e. 'Person' to 'person') if type(_temp_category_data_form) == str: for _type", "\"@type\", \"str\", \"categorial\", { \"person\": \"person\", \"organisation\": \"organisation\", \"place\": \"place\",", "not defined correctly. using default api...\" ) if self.show_printmessages else", "not find {category} information for {gnd_id} in raw data. continuing", "{self.gnd_id} no data could be delivered by api\" ) if", "internal_call=True, show_printmessages=False ).loadfile_json() except FileNotFound: print( \"GndConnector: could not find", "the base category 'type' is assigned to 'person', if the", "not find gnd_apilist.json in config dir. 
creating file with default", "data type is json, preset gnd_id_to_test value refers to Goethe\"\"\"", "of this data is achieved with the help of key-value", "or not (see connectivitycheck_loop()) show_printmessages: show class internal printmessages on", "\"sameAs\": [\"sameAs\", [{\"id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"variantNameEntityForThePerson\", [{\"forename\": [\"str\"],", "data_selection: if delivered, a normalized output is generated by renaming", "import FileReader, FileWriter from tei_entity_enricher.util.helper import local_save_path, makedir_if_necessary from tei_entity_enricher.util.exceptions", "a list of str (specific aliases will be extracted)\"\"\" #", "FileNotFound class GndConnector: def __init__( self, gnd_id: Union[str, List[str], None]", "= 0, check_connectivity: bool = True, show_printmessages: bool = True,", ") -> None: \"\"\"establishes connection to api, from which norm", "if type(mode) == str else mode # selected_categories_alias = category_sets.get(mode)[1]", "== True: self.connectivitycheck_loop() else: print( \"GndConnector: initialization has been done", "self.show_printmessages else None self.apiindex = self.remaining_apis_to_check[0] self.remaining_apis_to_check = [i for", "categorial list with selfdefined string (i.e. 
['Person', 'PoliticalLeader'] to 'person')", "one gnd id number and renames the keys and/or values.", "try: filereader = FileReader( filepath=self.return_complete_url(index), origin=\"web\", internal_call=True, show_printmessages=True, ) _temp_data", "checks if response data type is json, preset gnd_id_to_test value", "categories to get_gnd_data() only if they are defined in baseAlias", "to 'person') if type(_temp_category_data_form) == str: for _type in _temp_categorial_values:", "Union, List from tei_entity_enricher.interface.postprocessing.io import FileReader, FileWriter from tei_entity_enricher.util.helper import", "from different keys from the raw data into new keys", "if self.show_printmessages else None else: print( f\"GndConnector get_gnd_data() status: for", "response data type is json, preset gnd_id_to_test value refers to", "except: return False if type(result) == dict: return True return", "-> bool: \"\"\"auxiliary method of connectivitycheck_loop(), checks a single api`s", "culturegraph api the value of the base category 'type' is", "None self.connection_established = True if _temp_data != None and _temp_data", "gnd id number(s) apiindex: index of selected api in list", "to be able to normalize data from different apis, defines", "-> dict: \"\"\"sub method, which extracts the key-value pairs from", "\"GndConnector return_complete_url() internal error: no gnd id number has been", "can not be executed at the moment.\" ) if self.show_printmessages", "in [i for i, _ in enumerate(self.apilist)]: print( \"GndConnector print_complete_url()", "at the moment.\" ) if self.show_printmessages else None return None", "response json data as values data_selection: if delivered, a normalized", "defined correctly. using default api...\" ) if self.show_printmessages else None", "== False: print( \"GndConnector connectivity error: after connectivity check no", "apiindex is not defined correctly. 
using default api...\" ) if", "api defined in self.apilist, formatted with a gnd id number", "0 if self.gnd_id is not None: if type(self.gnd_id) == str:", "{}, \"placeAliases\": {}, \"organizationAliases\": {}, }, { \"name\": \"lobid\", \"baseUrl\":", "alternatives...\" ) if self.show_printmessages else None self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0]) self.connectivitycheck_loop() else: print(", "_temp_data print( f\"GndConnector get_gnd_data() status: data for gnd id {self.gnd_id}", "filereader = FileReader( filepath=self.return_complete_url(), origin=\"web\", internal_call=True, show_printmessages=False ) _temp_data =", "f\"GndConnector get_gnd_data() status: gnd id {index + 1} ({gnd}) of", "_type # replace found categorial list with selfdefined string (i.e.", "mode selected_categories_data = {} for category in selected_categories: _temp_data =", "as expected.\" ) if self.show_printmessages else None return None self.connection_established", "= True # filtering: build new dict with selected values,", "is not None: if type(self.gnd_id) == str: print( f\"GndConnector complete", "4. a categorization dict, if the original value type logic-wise", "this mapping dict) found in raw data, example 1: using", "save it gnd_id: gnd id number(s) apiindex: index of selected", "to a user-defined set of keys, for which the mapping", "executing sub method for filtering if data_selection is not None:", "list: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index]) else: print( \"GndConnector return_complete_url() internal error: no", "which the mapping is provided in self.apilist) \"\"\" if self.check_connectivity", "the current connector object. continuing attempt to receive gnd data", ") if self.show_printmessages else None return 0 else: print( \"GndConnector", "value\"\"\" if self.apiindex not in [i for i, _ in", "{category} information for {gnd_id} in raw data. 
continuing processing...\" )", "f\"GndConnector complete URL: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\" ) if self.show_printmessages else None elif", "self.gnd_id (list or str) selected by index value. returns 0", "from tei_entity_enricher.util.helper import local_save_path, makedir_if_necessary from tei_entity_enricher.util.exceptions import FileNotFound class", "print_complete_url() error: apiindex is not defined correctly. using default api...\"", "in enumerate(self.apilist)] self.connection_established = True return 0 else: print( f\"GndConnector", "else None elif type(self.gnd_id) == list: print( f\"GndConnector complete URL", "self.apilist[self.apiindex][\"baseAliases\"][category][3] # change found categorial string to selfdefined string (i.e.", "expected. checking for alternatives...\" ) if self.show_printmessages else None self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0])", "0 else: print( f\"GndConnector connectivity check: {self.apilist[self.remaining_apis_to_check[0]]['name']} api is currently", "single api in self.apilist (ascending) and setting self.apiindex to the", "index value. returns 0 or -1 for unittest purposes\"\"\" if", "a value 'Person' of type str in it, mode parameter", "self.apilist) response status code and checks if response data type", "None return -1 def return_complete_url(self, index: int = 0) ->", "set) # defining sub method for filtering def filter_received_data(gnd_id: str,", "== str: _temp_data = {} try: filereader = FileReader( filepath=self.return_complete_url(),", "this filtering process: the keys of 'baseAliases' dict define the", "so manually execute connectivitycheck_loop() method of the current connector object.", "json data as values data_selection: if delivered, a normalized output", "a category (defined keys of this mapping dict) based on", "int: \"\"\"print baseUrl string of the currently selected api defined", "connectivitycheck_loop() method of the current connector object. 
continuing attempt to", "'Person' to 'person') if type(_temp_category_data_form) == str: for _type in", "list) 1. the original key name, 2. the original value", "pairs from the raw data received from api for one", "import local_save_path, makedir_if_necessary from tei_entity_enricher.util.exceptions import FileNotFound class GndConnector: def", "self.apilist: List[dict] = [ { \"name\": \"culturegraph\", \"baseUrl\": \"https://hub.culturegraph.org/entityfacts/{}\", \"baseAliases\":", "print( \"GndConnector connectivity error: after connectivity check no connection could", "key-value pairs from the raw data received from api for", "{ \"person\": \"person\", \"organisation\": \"organisation\", \"place\": \"place\", }, ], \"name\":", "return -1 def print_complete_url(self, index: int = 0) -> int:", "FileWriter class to save it gnd_id: gnd id number(s) apiindex:", "been passed to connector object yet.\" ) if self.show_printmessages else", "extracted) or a list of str (specific aliases will be", "(specific aliases will be extracted)\"\"\" # todo: handle additional alias", "list mode = select specific aliases from base set) #", "gnd_id: Union[str, List[str], None] = None, apiindex: int = 0,", "self.show_printmessages else None result[gnd] = _temp_data print( f\"GndConnector get_gnd_data() status:", "mode == \"base\" else mode selected_categories_data = {} for category", "{self.apilist[self.remaining_apis_to_check[0]]['name']} api established.\" ) if self.show_printmessages else None self.apiindex =", "error: no gnd id number has been passed to connector", "List[str], None] = gnd_id self.apiindex: int = apiindex self.apilist_filepath: str", "if type(_temp_category_data_form) == str: for _type in _temp_categorial_values: if _temp_data", "check error: none of the listed apis is responding as", "of those api, which is first to pass the check", "value refers to Goethe\"\"\" try: result: dict = FileReader( 
filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test),", "\"place\", \"organization\" or a custom string refering to a user-defined", "True else None self.check_connectivity: bool = check_connectivity self.connection_established: bool =", "note: could not find {category} information for {gnd_id} in raw", "= {list(result.keys())[0]: filter_received_data(self.gnd_id, data_selection)} elif type(self.gnd_id) == list: _new_dict =", "if self.show_printmessages else None return None result = {} if", "== list: _new_dict = {} for key in result: _new_dict[key]", "change found categorial string to selfdefined string (i.e. 'Person' to", "None # handling of categorical data types if ( len(_temp_data)", "in gnd_apilist.json by user # category_sets = {'base': [list(self.apilist[self.apiindex][\"baseAliases\"].keys()), 'baseAliases'],", "\"118540238\") -> bool: \"\"\"auxiliary method of connectivitycheck_loop(), checks a single", "original value type (logic-wise: 'categorial' or 'nominal'), 4. 
a categorization", "self.connectivitycheck_loop() else: print( \"GndConnector: initialization has been done without connectivity", "of selected api in list defined in self.apilist check_connectivity: execute", "as a value, which has itself a value 'Person' of", "dict) based on specific values (defined in the values of", "result[gnd] = _temp_data print( f\"GndConnector get_gnd_data() status: gnd id {index", "{}, }, ] self.apiindex: int = 0 try: makedir_if_necessary(os.path.dirname(self.apilist_filepath)) FileWriter(data=self.apilist,", "origin=\"web\", internal_call=True, show_printmessages=True, ) _temp_data = filereader.loadfile_json() except: print( f\"GndConnector", "== False: self.check_connectivity == True if len(self.remaining_apis_to_check) > 0: if", "to the value of those api, which is first to", "{ \"type\": [ \"type\", [\"str\"], \"categorial\", { \"person\": \"Person\", \"organisation\":", "= _type # replace found categorial list with selfdefined string", "from tei_entity_enricher.interface.postprocessing.io import FileReader, FileWriter from tei_entity_enricher.util.helper import local_save_path, makedir_if_necessary", "type(self.apilist[self.apiindex][\"baseAliases\"][category][3] == dict) ): _temp_category_data_form = self.apilist[self.apiindex][\"baseAliases\"][category][1] _temp_categorial_values = self.apilist[self.apiindex][\"baseAliases\"][category][3]", "have not been checked yet in connectivitycheck_loop()\"\"\" print(\"initializing GndConnector..\") if", "api\" ) if self.show_printmessages else None return None elif type(self.gnd_id)", "== str else mode # selected_categories_alias = category_sets.get(mode)[1] if type(mode)", "None: if type(self.gnd_id) == str: print( f\"GndConnector complete URL: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\"", "\"PlaceOrGeographicName\", }, ], \"name\": [\"preferredName\", \"str\", \"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"],", "f\"GndConnector get_gnd_data() status: for gnd id 
{self.gnd_id} no data could", "key-value mapping information stored in self.apilist) can be \"base\" (all", "list as a value, which has itself a value 'Person'", "_temp_data != False: result[self.gnd_id] = _temp_data print( f\"GndConnector get_gnd_data() status:", "aliases for filtering purposes (see get_gnd_data()) connection_established: data from an", "new keys (purpose: json data delivered by different apis comes", "[ { \"name\": \"culturegraph\", \"baseUrl\": \"https://hub.culturegraph.org/entityfacts/{}\", \"baseAliases\": { \"type\": [", "error in get_gnd_data() method: could not load resource from api", "Union[str, List[str], None] = None, apiindex: int = 0, check_connectivity:", "# filtering: build new dict with selected values, which should", "i, _ in enumerate(self.apilist)] self.connection_established = True return 0 else:", "self.gnd_id (list or str) selected by index value\"\"\" if self.apiindex", "original key name, 2. the original value type (python-wise: i.e.", "filtering if data_selection is not None: if type(self.gnd_id) == str:", "i, _ in enumerate(self.apilist)] if self.check_connectivity == True: self.connectivitycheck_loop() else:", "checked yet. to do so manually execute connectivitycheck_loop() method of", "default gnd_apilist.json in config folder.\" ) if self.show_printmessages == True", "key '@type' with the value 'person' of type str, example", "True return False def connectivitycheck_loop(self) -> int: \"\"\"recursive connectivity check,", "'person') elif type(_temp_category_data_form) == list: for _type in _temp_categorial_values: if", "filtered or unfiltered response json data as values data_selection: if", "refering to a user-defined set of keys, for which the", "(python-wise: i.e. 'str' or '[str]'), 3. 
the original value type", "to receive gnd data from {self.apilist[self.apiindex]['name']} api...\" ) if self.show_printmessages", "# => allow parsing a list of categories to get_gnd_data()", "raw data into new keys (purpose: json data delivered by", "Cache class for further processing or FileWriter class to save", "information to assign a category (defined keys of this mapping", "+ 1} ({gnd}) of {len(self.gnd_id)} processed\" ) if self.show_printmessages else", "type(self.gnd_id) == str: _temp_data = {} try: filereader = FileReader(", "different apis comes in different key-value-structures; normalization of this data", "selected_categories_data = {} for category in selected_categories: _temp_data = []", "expected.\" ) if self.show_printmessages else None return -1 def print_complete_url(self,", "gnd_apilist.json in config dir. creating file with default settings...\" )", "local_save_path, makedir_if_necessary from tei_entity_enricher.util.exceptions import FileNotFound class GndConnector: def __init__(", "connectivitycheck_loop(), checks a single api`s (from self.apilist) response status code", "False self.remaining_apis_to_check: list = [i for i, _ in enumerate(self.apilist)]", "elif type(_temp_category_data_form) == list: for _type in _temp_categorial_values: if _temp_categorial_values[_type]", "api...\" ) if self.show_printmessages else None self.apiindex = 0 if", "else mode # selected_categories_alias = category_sets.get(mode)[1] if type(mode) == str", "filepath=self.apilist_filepath, origin=\"local\", internal_call=True, show_printmessages=False ).loadfile_json() except FileNotFound: print( \"GndConnector: could", "self.apiindex to the value of those api, which is first", "baseAliases (i.e. 
[\"type\", \"name\"]) (not yet implemented: can be a", "their value list denotates (in order of the list) 1.", "\"name\": \"culturegraph\", \"baseUrl\": \"https://hub.culturegraph.org/entityfacts/{}\", \"baseAliases\": { \"type\": [ \"@type\", \"str\",", "been done without connectivity check.\" ) if self.show_printmessages else None", "from the raw data received from api for one gnd", "in the values of this mapping dict) found in raw", "returned (base mode = all base aliases from apilist definition.", "Union[str, None]: \"\"\"return baseUrl string of the currently selected api", "already been received or not remaining_apis_to_check: list of apiindex values,", "_temp_data = {} try: filereader = FileReader( filepath=self.return_complete_url(index), origin=\"web\", internal_call=True,", "itself a value 'Person' of type str in it, mode", "for index, gnd in enumerate(self.gnd_id): _temp_data = {} try: filereader", "they are defined in baseAlias set? base_categories = list(self.apilist[self.apiindex][\"baseAliases\"].keys()) selected_categories", "\"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"id\": \"str\"}], \"nominal\"], \"pseudonyms\":", "data json object has a key 'type' with a list", "= result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]] except KeyError: _temp_data = [] print( f\"GndConnector get_gnd_data()", "path to apilist config file apilist: list of dicts as", "an api has already been received or not remaining_apis_to_check: list", "= {} if type(self.gnd_id) == str: _temp_data = {} try:", "check passed, connection to {self.apilist[self.remaining_apis_to_check[0]]['name']} api established.\" ) if self.show_printmessages", "in _temp_categorial_values: if _temp_data == _temp_categorial_values[_type]: _temp_data = _type #", "if the original value type logic-wise is 'categorial': it delivers", "gnd data from {self.apilist[self.apiindex]['name']} api...\" ) if self.show_printmessages else None", 
"_temp_data = _type # replace found categorial list with selfdefined", "dict is created, having gnd id numbers as keys and", "parsing a list of categories to get_gnd_data() only if they", "filepath=self.return_complete_url(index), origin=\"web\", internal_call=True, show_printmessages=True, ) _temp_data = filereader.loadfile_json() except: print(", "replace found categorial list with selfdefined string (i.e. ['Person', 'PoliticalLeader']", "filtering process: the keys of 'baseAliases' dict define the new", "the mapping is provided in self.apilist) \"\"\" if self.check_connectivity ==", "as keys and filtered or unfiltered response json data as", "to apis have not been checked yet. to do so", "len(self.remaining_apis_to_check) > 0: if self.connectivitycheck_single(self.remaining_apis_to_check[0]) == True: print( f\"GndConnector: connectivity", "in raw data, example 1: using culturegraph api the value", "status code and checks if response data type is json,", "type (logic-wise: 'categorial' or 'nominal'), 4. 
a categorization dict, if", "\"pseudonym\", [{\"preferredName\": \"str\"}], \"nominal\", ], }, \"personAliases\": {}, \"placeAliases\": {},", "# todo: handle additional alias definition sets in gnd_apilist.json by", "in self.apilist) can be \"base\" (all baseAliases data is provided:", "_temp_categorial_values = self.apilist[self.apiindex][\"baseAliases\"][category][3] # change found categorial string to selfdefined", "folder.\" ) if self.show_printmessages == True else None self.check_connectivity: bool", "True, ) -> None: \"\"\"establishes connection to api, from which", "_temp_data = result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]] except KeyError: _temp_data = [] print( f\"GndConnector", "or FileWriter class to save it gnd_id: gnd id number(s)", "data json object has a key '@type' with the value", "for _type in _temp_categorial_values: if _temp_data == _temp_categorial_values[_type]: _temp_data =", "config folder.\" ) if self.show_printmessages == True else None self.check_connectivity:", "of gnd id number {index + 1} in passed gnd", "+ 1} in passed gnd id list: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\" ) if", "of categorical data types if ( len(_temp_data) > 0 and", "self.apilist (ascending) and setting self.apiindex to the value of those", "refers to Goethe\"\"\" try: result: dict = FileReader( filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test), origin=\"web\",", "in self.apilist are used for this filtering process: the keys", "error: apiindex is not defined correctly. 
using default api...\" )", "= 0 try: makedir_if_necessary(os.path.dirname(self.apilist_filepath)) FileWriter(data=self.apilist, filepath=self.apilist_filepath).writefile_json() except: print( f\"GndConnector __init__():", "defining sub method for filtering def filter_received_data(gnd_id: str, mode: Union[str,", "moment.\" ) if self.show_printmessages else None return None result =", "check.\" ) if self.show_printmessages else None def connectivitycheck_single(self, index_to_test: int,", "can be a list of one or more baseAliases (i.e.", "be \"base\" (all baseAliases data is provided: \"type\", \"name\", \"furtherNames\",", "List from tei_entity_enricher.interface.postprocessing.io import FileReader, FileWriter from tei_entity_enricher.util.helper import local_save_path,", "loaded data can be passed to an instance of Cache", "], \"name\": [\"preferredName\", \"str\", \"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\":", "aliases will be extracted)\"\"\" # todo: handle additional alias definition", "selfdefined string (i.e. 
'Person' to 'person') if type(_temp_category_data_form) == str:", "of the currently selected api defined in self.apilist, formatted with", "is provided in self.apilist) \"\"\" if self.check_connectivity == False: print(", "gnd id {index + 1} ({gnd}) of {len(self.gnd_id)} no data", "class GndConnector: def __init__( self, gnd_id: Union[str, List[str], None] =", "is achieved with the help of key-value mapping information stored", "= FileReader( filepath=self.apilist_filepath, origin=\"local\", internal_call=True, show_printmessages=False ).loadfile_json() except FileNotFound: print(", "if self.connectivitycheck_single(self.remaining_apis_to_check[0]) == True: print( f\"GndConnector: connectivity check passed, connection", "print( f\"GndConnector: connectivity check passed, connection to {self.apilist[self.remaining_apis_to_check[0]]['name']} api established.\"", "GndConnector: def __init__( self, gnd_id: Union[str, List[str], None] = None,", "will be extracted)\"\"\" # todo: handle additional alias definition sets", "no connection could has been established to any of the", "if self.show_printmessages else None # handling of categorical data types", "else None elif self.connection_established == False: print( \"GndConnector connectivity error:", "= None, apiindex: int = 0, check_connectivity: bool = True,", "check_connectivity: bool = True, show_printmessages: bool = True, ) ->", "str (specific aliases will be extracted)\"\"\" # todo: handle additional", "get_gnd_data() filtering note: could not find {category} information for {gnd_id}", "from the raw data into new keys (purpose: json data", "'custom': [list(self.apilist[self.apiindex][\"custom\"].keys()), 'custom'] # } # selected_categories_list = category_sets.get(mode)[0] if", "None result = {} if type(self.gnd_id) == str: _temp_data =", "class for further processing or FileWriter class to save it", "{index + 1} ({gnd}) of {len(self.gnd_id)} no data could be", "[\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": 
[\"sameAs\", [{\"@id\": \"str\"}], \"nominal\"], \"pseudonyms\": [", "from which norm data for entities of Deutsche Nationalbibliothek´s database", "filter results, a dict is created, having gnd id numbers", "number of self.gnd_id (list or str) selected by index value\"\"\"", "in [i for i, _ in enumerate(self.apilist)]: print( \"GndConnector return_complete_url()", "[\"str\"], \"categorial\", { \"person\": \"Person\", \"organisation\": \"CorporateBody\", \"place\": \"PlaceOrGeographicName\", },", "'PoliticalLeader'] to 'person') elif type(_temp_category_data_form) == list: for _type in", "None: if type(self.gnd_id) == str: _new_dict = {list(result.keys())[0]: filter_received_data(self.gnd_id, data_selection)}", "(i.e. ['Person', 'PoliticalLeader'] to 'person') elif type(_temp_category_data_form) == list: for", "api, which is first to pass the check successfully. returns", "category in selected_categories: _temp_data = [] try: _temp_data = result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]]", "which is first to pass the check successfully. 
returns 0", "\"nominal\", ], }, \"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\": {}, },", "else None return None elif type(self.gnd_id) == list: for index,", "(defined keys of this mapping dict) based on specific values", "data from different apis, defines api`s url and aliases for", "def __init__( self, gnd_id: Union[str, List[str], None] = None, apiindex:", "{self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\" ) if self.show_printmessages else None return 0 else: print(", "set of keys, for which the mapping is provided in", "error: after connectivity check no connection could has been established", "should be returned (base mode = all base aliases from", "True return 0 else: print( f\"GndConnector connectivity check: {self.apilist[self.remaining_apis_to_check[0]]['name']} api", "object has a key 'type' with a list as a", "self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0]) self.connectivitycheck_loop() else: print( \"GndConnector connectivity check error: none of", "value 'Person' of type str in it, mode parameter accepts", "'baseAliases'], # 'custom': [list(self.apilist[self.apiindex][\"custom\"].keys()), 'custom'] # } # selected_categories_list =", "continuing processing...\" ) if self.show_printmessages else None # handling of", "1} ({gnd}) of {len(self.gnd_id)} processed\" ) if self.show_printmessages else None", "connections to apis have not been checked yet. 
to do", "['Person', 'PoliticalLeader'] to 'person') elif type(_temp_category_data_form) == list: for _type", "todo: handle additional alias definition sets in gnd_apilist.json by user", "unittest purposes\"\"\" if self.check_connectivity == False: self.check_connectivity == True if", "the raw data received from api for one gnd id", "if self.check_connectivity == True: self.connectivitycheck_loop() else: print( \"GndConnector: initialization has", "print( f\"GndConnector __init__(): could not create default gnd_apilist.json in config", "api`s url and aliases for filtering purposes (see get_gnd_data()) connection_established:", "def connectivitycheck_single(self, index_to_test: int, gnd_id_to_test: str = \"118540238\") -> bool:", "self.apilist are used for this filtering process: the keys of", "has been done without connectivity check.\" ) if self.show_printmessages else", "key-value-structures; normalization of this data is achieved with the help", "= {} for category in selected_categories: _temp_data = [] try:", "be extracted)\"\"\" # todo: handle additional alias definition sets in", "id number of self.gnd_id (list or str) selected by index", "dict = FileReader( filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test), origin=\"web\", internal_call=True, show_printmessages=self.show_printmessages, ).loadfile_json() except: return", "not been checked yet. 
to do so manually execute connectivitycheck_loop()", "= True, show_printmessages: bool = True, ) -> None: \"\"\"establishes", "to do so manually execute connectivitycheck_loop() method of the current", "api with the possibility to filter results, a dict is", "list = [i for i, _ in enumerate(self.apilist)] if self.check_connectivity", "for _type in _temp_categorial_values: if _temp_categorial_values[_type] in _temp_data: _temp_data =", "apiindex: int = 0, check_connectivity: bool = True, show_printmessages: bool", "defined in self.apilist check_connectivity: execute connectivity check in __init__() or", "check_connectivity: execute connectivity check in __init__() or not (see connectivitycheck_loop())", "from base set) # defining sub method for filtering def", "= category_sets.get(mode)[0] if type(mode) == str else mode # selected_categories_alias", "-1 for unittest purposes\"\"\" if self.apiindex not in [i for", "connectivity check: {self.apilist[self.remaining_apis_to_check[0]]['name']} api is currently not responding as expected.", "not been checked yet in connectivitycheck_loop()\"\"\" print(\"initializing GndConnector..\") if show_printmessages", "(all baseAliases data is provided: \"type\", \"name\", \"furtherNames\", \"sameAs\", \"pseudonyms\")", "config dir. creating file with default settings...\" ) if self.show_printmessages", "stored in self.apilist) can be \"base\" (all baseAliases data is", "number of self.gnd_id (list or str) selected by index value.", "the original value type (logic-wise: 'categorial' or 'nominal'), 4. a", "origin=\"local\", internal_call=True, show_printmessages=False ).loadfile_json() except FileNotFound: print( \"GndConnector: could not", "self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index]) else: print( \"GndConnector return_complete_url() internal error: no gnd id", "could has been established to any of the available apis.", "key name, 2. the original value type (python-wise: i.e. 
'str'", "_ in enumerate(self.apilist)] if self.check_connectivity == True: self.connectivitycheck_loop() else: print(", "is created, having gnd id numbers as keys and filtered", "the original value type (python-wise: i.e. 'str' or '[str]'), 3.", "a key 'type' with a list as a value, which", "\"person\": \"person\", \"organisation\": \"organisation\", \"place\": \"place\", }, ], \"name\": [\"preferredName\",", "api`s (from self.apilist) response status code and checks if response", "correctly. using default api...\" ) if self.show_printmessages else None self.apiindex", "can be a \"person\", \"place\", \"organization\" or a custom string", "\"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"@id\": \"str\"}], \"nominal\"], \"pseudonyms\":", "build new dict with selected values, which should be returned", "or 'nominal'), 4. a categorization dict, if the original value", "=> allow parsing a list of categories to get_gnd_data() only", "the value 'person' of type str, example 2: using lobid", "from {self.apilist[self.apiindex]['name']} api...\" ) if self.show_printmessages else None elif self.connection_established", "has itself a value 'Person' of type str in it,", "delivers mapping information to assign a category (defined keys of", "be extracted) or a list of str (specific aliases will", "after connectivity check no connection could has been established to", "the new key names, their value list denotates (in order", "{ \"type\": [ \"@type\", \"str\", \"categorial\", { \"person\": \"person\", \"organisation\":", "is json, preset gnd_id_to_test value refers to Goethe\"\"\" try: result:", "== _temp_categorial_values[_type]: _temp_data = _type # replace found categorial list", "base aliases will be extracted) or a list of str", "to an instance of Cache class for further processing or", "api established.\" ) if self.show_printmessages else None self.apiindex = self.remaining_apis_to_check[0]", "FileWriter from 
tei_entity_enricher.util.helper import local_save_path, makedir_if_necessary from tei_entity_enricher.util.exceptions import FileNotFound", "return -1 def return_complete_url(self, index: int = 0) -> Union[str,", "\"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\": {}, }, ] self.apiindex: int", "additional alias definition sets in gnd_apilist.json by user # category_sets", "List[str], None] = None) -> Union[dict, None]: \"\"\"method to receive", "norm data for entities of Deutsche Nationalbibliothek´s database is retrieved,", "currently not responding as expected. checking for alternatives...\" ) if", "check in __init__() or not (see connectivitycheck_loop()) show_printmessages: show class", "[{\"preferredName\": \"str\"}], \"nominal\", ], }, \"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\":", "type(_temp_category_data_form) == str: for _type in _temp_categorial_values: if _temp_data ==", "of the listed apis is responding as expected.\" ) if", "{self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\" ) if self.show_printmessages else None elif type(self.gnd_id) == list:", "baseAlias set? base_categories = list(self.apilist[self.apiindex][\"baseAliases\"].keys()) selected_categories = base_categories if mode", "filter_received_data(gnd_id: str, mode: Union[str, List[str]]) -> dict: \"\"\"sub method, which", "\"categorial\", { \"person\": \"Person\", \"organisation\": \"CorporateBody\", \"place\": \"PlaceOrGeographicName\", }, ],", "self.connection_established = True # filtering: build new dict with selected", "self.show_printmessages else None return -1 def return_complete_url(self, index: int =", "mode = all base aliases from apilist definition. list mode", "category (defined keys of this mapping dict) based on specific", "self.apiindex = 0 if self.gnd_id is not None: if type(self.gnd_id)", "if self.show_printmessages else None self.connection_established = True # filtering: build", "list with selfdefined string (i.e. 
['Person', 'PoliticalLeader'] to 'person') elif", "= FileReader( filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test), origin=\"web\", internal_call=True, show_printmessages=self.show_printmessages, ).loadfile_json() except: return False", "FileReader( filepath=self.apilist_filepath, origin=\"local\", internal_call=True, show_printmessages=False ).loadfile_json() except FileNotFound: print( \"GndConnector:", "{self.gnd_id} received.\" ) if self.show_printmessages else None else: print( f\"GndConnector", "tei_entity_enricher.util.helper import local_save_path, makedir_if_necessary from tei_entity_enricher.util.exceptions import FileNotFound class GndConnector:", "by user # category_sets = {'base': [list(self.apilist[self.apiindex][\"baseAliases\"].keys()), 'baseAliases'], # 'custom':", "value type logic-wise is 'categorial': it delivers mapping information to", "= filereader.loadfile_json() except: print( \"GndConnector connectivity error in get_gnd_data() method:", "selfdefined string (i.e. 
['Person', 'PoliticalLeader'] to 'person') elif type(_temp_category_data_form) ==", "self.gnd_id is not None: if type(self.gnd_id) == str: print( f\"GndConnector", "None) -> Union[dict, None]: \"\"\"method to receive data from api", "Union[str, List[str], None] = None) -> Union[dict, None]: \"\"\"method to", "defines api`s url and aliases for filtering purposes (see get_gnd_data())", "selected values, which should be returned (base mode = all", "data from different keys from the raw data into new", "# selected_categories_list = category_sets.get(mode)[0] if type(mode) == str else mode", "has been passed to connector object yet.\" ) if self.show_printmessages", "and re-sorting data from different keys from the raw data", "purposes\"\"\" if self.apiindex not in [i for i, _ in", "== dict: return True return False def connectivitycheck_loop(self) -> int:", "2: using lobid api the value of the base category", "listed apis is responding as expected.\" ) if self.show_printmessages else", "established.\" ) if self.show_printmessages else None self.apiindex = self.remaining_apis_to_check[0] self.remaining_apis_to_check", "(list or str) selected by index value\"\"\" if self.apiindex not", "of categories to get_gnd_data() only if they are defined in", "== str: print( f\"GndConnector complete URL: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\" ) if self.show_printmessages", "len(_temp_data) > 0 and self.apilist[self.apiindex][\"baseAliases\"][category][2] == \"categorial\" and type(self.apilist[self.apiindex][\"baseAliases\"][category][3] ==", "in config folder.\" ) if self.show_printmessages == True else None", "a key '@type' with the value 'person' of type str,", "first to pass the check successfully. returns 0 or -1", "= 0) -> Union[str, None]: \"\"\"return baseUrl string of the", "gnd_id_to_test value refers to Goethe\"\"\" try: result: dict = FileReader(", "and renames the keys and/or values. 
alias definitions in self.apilist", "= [] print( f\"GndConnector get_gnd_data() filtering note: could not find", "check successfully. returns 0 or -1 for unittest purposes\"\"\" if", "int = apiindex self.apilist_filepath: str = os.path.join(local_save_path, \"config\", \"postprocessing\", \"gnd_apilist.json\")", "base_categories if mode == \"base\" else mode selected_categories_data = {}", "value of the base category 'type' is assigned to 'person',", "int = 0) -> Union[str, None]: \"\"\"return baseUrl string of", "apilist config file apilist: list of dicts as configuration data", "(list or str) selected by index value. returns 0 or", "\"name\"]) (not yet implemented: can be a \"person\", \"place\", \"organization\"", "dict define the new key names, their value list denotates", "\"pseudonyms\": [ \"pseudonym\", [{\"preferredName\": \"str\"}], \"nominal\", ], }, \"personAliases\": {},", "else None return None def get_gnd_data(self, data_selection: Union[str, List[str], None]", "None return None elif type(self.gnd_id) == list: for index, gnd", "mode parameter accepts str 'base' (all base aliases will be", "api\" ) if self.show_printmessages else None result[gnd] = _temp_data print(", "of dicts as configuration data set, delivers a mapping to", "of this mapping dict) found in raw data, example 1:", "except KeyError: _temp_data = [] print( f\"GndConnector get_gnd_data() filtering note:", "if type(self.gnd_id) == str: _new_dict = {list(result.keys())[0]: filter_received_data(self.gnd_id, data_selection)} elif", "mapping to be able to normalize data from different apis,", "\"place\": \"PlaceOrGeographicName\", }, ], \"name\": [\"preferredName\", \"str\", \"nominal\"], \"furtherNames\": [\"variantName\",", ") if self.show_printmessages else None self.apiindex = 0 if self.gnd_id", "dict) ): _temp_category_data_form = self.apilist[self.apiindex][\"baseAliases\"][category][1] _temp_categorial_values = self.apilist[self.apiindex][\"baseAliases\"][category][3] # change", "to 
normalize data from different apis, defines api`s url and", "\"furtherNames\", \"sameAs\", \"pseudonyms\") can be a list of one or", "has a key '@type' with the value 'person' of type", "self.apilist: Union[dict, None] = FileReader( filepath=self.apilist_filepath, origin=\"local\", internal_call=True, show_printmessages=False ).loadfile_json()", "not in [i for i, _ in enumerate(self.apilist)]: print( \"GndConnector", "has already been received or not remaining_apis_to_check: list of apiindex", "-> int: \"\"\"print baseUrl string of the currently selected api", "string refering to a user-defined set of keys, for which", "self.apilist[self.apiindex][\"baseAliases\"][category][1] _temp_categorial_values = self.apilist[self.apiindex][\"baseAliases\"][category][3] # change found categorial string to", "string (i.e. 'Person' to 'person') if type(_temp_category_data_form) == str: for", "pass the check successfully. returns 0 or -1 for unittest", "None return 0 else: print( \"GndConnector print_complete_url() internal error: no", "_temp_data return selected_categories_data # executing sub method for filtering if", "mapping dict) based on specific values (defined in the values", "aliases from base set) # defining sub method for filtering", "= self.apilist[self.apiindex][\"baseAliases\"][category][1] _temp_categorial_values = self.apilist[self.apiindex][\"baseAliases\"][category][3] # change found categorial string", "\"\"\"print baseUrl string of the currently selected api defined in", "from api with the possibility to filter results, a dict", "FileNotFound: print( \"GndConnector: could not find gnd_apilist.json in config dir.", "method, which extracts the key-value pairs from the raw data", "to selfdefined string (i.e. 'Person' to 'person') if type(_temp_category_data_form) ==", "get_gnd_data() only if they are defined in baseAlias set? base_categories", "and/or values. 
alias definitions in self.apilist are used for this", "int, gnd_id_to_test: str = \"118540238\") -> bool: \"\"\"auxiliary method of", "index: int = 0) -> Union[str, None]: \"\"\"return baseUrl string", "connectivity error: after connectivity check no connection could has been", "get_gnd_data() status: data for gnd id {self.gnd_id} received.\" ) if", "{}, \"placeAliases\": {}, \"organizationAliases\": {}, }, ] self.apiindex: int =", "None result[gnd] = _temp_data print( f\"GndConnector get_gnd_data() status: gnd id", "to get_gnd_data() only if they are defined in baseAlias set?", ") if self.show_printmessages == True else None self.check_connectivity: bool =", "None return None result = {} if type(self.gnd_id) == str:", "to filter results, a dict is created, having gnd id", "for alternatives...\" ) if self.show_printmessages else None self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0]) self.connectivitycheck_loop() else:", "'person') if type(_temp_category_data_form) == str: for _type in _temp_categorial_values: if", "it gnd_id: gnd id number(s) apiindex: index of selected api", "renames the keys and/or values. 
alias definitions in self.apilist are", "bool = True, ) -> None: \"\"\"establishes connection to api,", "str, mode: Union[str, List[str]]) -> dict: \"\"\"sub method, which extracts", "i, _ in enumerate(self.apilist)]: print( \"GndConnector return_complete_url() error: apiindex is", "not None: if type(self.gnd_id) == str: print( f\"GndConnector complete URL:", "user # category_sets = {'base': [list(self.apilist[self.apiindex][\"baseAliases\"].keys()), 'baseAliases'], # 'custom': [list(self.apilist[self.apiindex][\"custom\"].keys()),", "as expected.\" ) if self.show_printmessages else None return -1 def", "which norm data for entities of Deutsche Nationalbibliothek´s database is", "None else: print( f\"GndConnector get_gnd_data() status: for gnd id {self.gnd_id}", "type(mode) == str else mode # selected_categories_alias = category_sets.get(mode)[1] if", "dict) found in raw data, example 1: using culturegraph api", "\"str\"}], \"nominal\"], \"pseudonyms\": [ \"pseudonym\", [{\"preferredName\": \"str\"}], \"nominal\", ], },", "if the raw data json object has a key 'type'", "more baseAliases (i.e. 
[\"type\", \"name\"]) (not yet implemented: can be", "0 or -1 for unittest purposes\"\"\" if self.check_connectivity == False:", "categorization dict, if the original value type logic-wise is 'categorial':", "return False def connectivitycheck_loop(self) -> int: \"\"\"recursive connectivity check, checking", "be returned (base mode = all base aliases from apilist", "str in it, mode parameter accepts str 'base' (all base", "show class internal printmessages on runtime or not apilist_filepath: path", "self.connection_established: bool = False self.remaining_apis_to_check: list = [i for i,", "return None elif type(self.gnd_id) == list: for index, gnd in", "the currently selected api defined in self.apilist, formatted with a", "index: int = 0) -> int: \"\"\"print baseUrl string of", "= show_printmessages self.gnd_id: Union[str, List[str], None] = gnd_id self.apiindex: int", "on specific values (defined in the values of this mapping", "List[str], None] = None, apiindex: int = 0, check_connectivity: bool", "example 2: using lobid api the value of the base", "with the possibility to filter results, a dict is created,", "selected_categories = base_categories if mode == \"base\" else mode selected_categories_data", "# executing sub method for filtering if data_selection is not", "of {len(self.gnd_id)} no data could be delivered by api\" )", ") _temp_data = filereader.loadfile_json() except: print( f\"GndConnector get_gnd_data() status: for", "been checked yet in connectivitycheck_loop()\"\"\" print(\"initializing GndConnector..\") if show_printmessages else", "self.check_connectivity == False: print( f\"GndConnector note: connections to apis have", "None and _temp_data != False: result[self.gnd_id] = _temp_data print( f\"GndConnector", "\"type\", [\"str\"], \"categorial\", { \"person\": \"Person\", \"organisation\": \"CorporateBody\", \"place\": \"PlaceOrGeographicName\",", "= _temp_data return selected_categories_data # executing sub method for filtering", "0 or -1 for 
unittest purposes\"\"\" if self.apiindex not in", "from different apis, defines api`s url and aliases for filtering", "for unittest purposes\"\"\" if self.apiindex not in [i for i,", "of self.gnd_id (list or str) selected by index value. returns", "is first to pass the check successfully. returns 0 or", "for category in selected_categories: _temp_data = [] try: _temp_data =", ") if self.show_printmessages else None self.connection_established = True # filtering:", "api has already been received or not remaining_apis_to_check: list of", "id number has been passed to connector object yet.\" )", "json data delivered by different apis comes in different key-value-structures;", "self.show_printmessages else None self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0]) self.connectivitycheck_loop() else: print( \"GndConnector connectivity check", "result: dict = FileReader( filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test), origin=\"web\", internal_call=True, show_printmessages=self.show_printmessages, ).loadfile_json() except:", "}, \"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\": {}, }, { \"name\":", "any of the available apis. gnd data queries can not", "every single api in self.apilist (ascending) and setting self.apiindex to", "= list(self.apilist[self.apiindex][\"baseAliases\"].keys()) selected_categories = base_categories if mode == \"base\" else", "{} if type(self.gnd_id) == str: _temp_data = {} try: filereader", "_temp_data = [] try: _temp_data = result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]] except KeyError: _temp_data", "data set, delivers a mapping to be able to normalize", "value type (python-wise: i.e. 'str' or '[str]'), 3. 
the original", "could not create default gnd_apilist.json in config folder.\" ) if", "== list: for _type in _temp_categorial_values: if _temp_categorial_values[_type] in _temp_data:", "accepts str 'base' (all base aliases will be extracted) or", "the help of key-value mapping information stored in self.apilist) can", "str: _new_dict = {list(result.keys())[0]: filter_received_data(self.gnd_id, data_selection)} elif type(self.gnd_id) == list:", "\"person\": \"Person\", \"organisation\": \"CorporateBody\", \"place\": \"PlaceOrGeographicName\", }, ], \"name\": [\"preferredName\",", "self.show_printmessages else None return None elif type(self.gnd_id) == list: for", "], }, \"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\": {}, }, ]", "= [i for i, _ in enumerate(self.apilist)] if self.check_connectivity ==", "str) selected by index value. returns 0 or -1 for", "printmessages on runtime or not apilist_filepath: path to apilist config", "-1 def return_complete_url(self, index: int = 0) -> Union[str, None]:", "f\"GndConnector: connectivity check passed, connection to {self.apilist[self.remaining_apis_to_check[0]]['name']} api established.\" )", "of keys, for which the mapping is provided in self.apilist)", "self.check_connectivity: bool = check_connectivity self.connection_established: bool = False self.remaining_apis_to_check: list", "and setting self.apiindex to the value of those api, which", "found in raw data, example 1: using culturegraph api the", "data_selection)} elif type(self.gnd_id) == list: _new_dict = {} for key", "== \"base\" else mode selected_categories_data = {} for category in", "({gnd}) of {len(self.gnd_id)} processed\" ) if self.show_printmessages else None self.connection_established", "from an api has already been received or not remaining_apis_to_check:", "return selected_categories_data # executing sub method for filtering if data_selection", "if self.show_printmessages else None return -1 def print_complete_url(self, index: int", 
"with the value 'person' of type str, example 2: using", "None self.apilist: List[dict] = [ { \"name\": \"culturegraph\", \"baseUrl\": \"https://hub.culturegraph.org/entityfacts/{}\",", "attempt to receive gnd data from {self.apilist[self.apiindex]['name']} api...\" ) if", "delivered, a normalized output is generated by renaming keys and", "to apilist config file apilist: list of dicts as configuration", "list: for index, gnd in enumerate(self.gnd_id): _temp_data = {} try:", "json object has a key '@type' with the value 'person'", "possibility to filter results, a dict is created, having gnd", "method for filtering def filter_received_data(gnd_id: str, mode: Union[str, List[str]]) ->", "list: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\" ) if self.show_printmessages else None return 0 else:", "self.connection_established = True if _temp_data != None and _temp_data !=", "_temp_data print( f\"GndConnector get_gnd_data() status: gnd id {index + 1}", "list of dicts as configuration data set, delivers a mapping", "none of the listed apis is responding as expected.\" )", "apis comes in different key-value-structures; normalization of this data is", "print( \"GndConnector connectivity error in get_gnd_data() method: could not load", "'base' (all base aliases will be extracted) or a list", "else: print( \"GndConnector print_complete_url() internal error: no gnd id number", "try: self.apilist: Union[dict, None] = FileReader( filepath=self.apilist_filepath, origin=\"local\", internal_call=True, show_printmessages=False", "in config dir. creating file with default settings...\" ) if", "for {gnd_id} in raw data. 
continuing processing...\" ) if self.show_printmessages", "= filereader.loadfile_json() except: print( f\"GndConnector get_gnd_data() status: for gnd id", "in enumerate(self.apilist)]: print( \"GndConnector print_complete_url() error: apiindex is not defined", "a gnd id number of self.gnd_id (list or str) selected", "else None return -1 def return_complete_url(self, index: int = 0)", "print( \"GndConnector return_complete_url() error: apiindex is not defined correctly. using", "0: if self.connectivitycheck_single(self.remaining_apis_to_check[0]) == True: print( f\"GndConnector: connectivity check passed,", "base aliases from apilist definition. list mode = select specific", "\"str\", \"categorial\", { \"person\": \"person\", \"organisation\": \"organisation\", \"place\": \"place\", },", "internal_call=True, show_printmessages=True, ) _temp_data = filereader.loadfile_json() except: print( f\"GndConnector get_gnd_data()", "\"name\": \"lobid\", \"baseUrl\": \"http://lobid.org/gnd/{}\", \"baseAliases\": { \"type\": [ \"type\", [\"str\"],", "if the raw data json object has a key '@type'", "using default api...\" ) if self.show_printmessages else None self.apiindex =", "a normalized output is generated by renaming keys and re-sorting", "provided: \"type\", \"name\", \"furtherNames\", \"sameAs\", \"pseudonyms\") can be a list", ").loadfile_json() except: return False if type(result) == dict: return True", "received from api for one gnd id number and renames", "= None) -> Union[dict, None]: \"\"\"method to receive data from", "base_categories = list(self.apilist[self.apiindex][\"baseAliases\"].keys()) selected_categories = base_categories if mode == \"base\"", "connectivity check.\" ) if self.show_printmessages else None def connectivitycheck_single(self, index_to_test:", "new dict with selected values, which should be returned (base", "self.remaining_apis_to_check[0] self.remaining_apis_to_check = [i for i, _ in enumerate(self.apilist)] self.connection_established", "== 
list: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index]) else: print( \"GndConnector return_complete_url() internal error:", "apilist_filepath: path to apilist config file apilist: list of dicts", "None, apiindex: int = 0, check_connectivity: bool = True, show_printmessages:", "dict, if the original value type logic-wise is 'categorial': it", "if type(self.gnd_id) == str: print( f\"GndConnector complete URL: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\" )", "runtime or not apilist_filepath: path to apilist config file apilist:", "can be passed to an instance of Cache class for", "purposes (see get_gnd_data()) connection_established: data from an api has already", "[ \"@type\", \"str\", \"categorial\", { \"person\": \"person\", \"organisation\": \"organisation\", \"place\":", "a categorization dict, if the original value type logic-wise is", "api is currently not responding as expected. checking for alternatives...\"", "[ \"pseudonym\", [{\"preferredName\": \"str\"}], \"nominal\", ], }, \"personAliases\": {}, \"placeAliases\":", "print( f\"GndConnector get_gnd_data() status: for gnd id {self.gnd_id} no data", "custom string refering to a user-defined set of keys, for", "Union[dict, None] = FileReader( filepath=self.apilist_filepath, origin=\"local\", internal_call=True, show_printmessages=False ).loadfile_json() except", "api, from which norm data for entities of Deutsche Nationalbibliothek´s", "in self.apilist check_connectivity: execute connectivity check in __init__() or not", "type(self.gnd_id) == list: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index]) else: print( \"GndConnector return_complete_url() internal", "\"nominal\"], \"pseudonyms\": [ \"pseudonym\", [{\"preferredName\": \"str\"}], \"nominal\", ], }, \"personAliases\":", "= apiindex self.apilist_filepath: str = os.path.join(local_save_path, \"config\", \"postprocessing\", \"gnd_apilist.json\") try:", "{} for key in result: _new_dict[key] 
= filter_received_data(key, data_selection) result", "order of the list) 1. the original key name, 2.", "resource from api as expected.\" ) if self.show_printmessages else None", "connectivity check in __init__() or not (see connectivitycheck_loop()) show_printmessages: show", "is not None: if type(self.gnd_id) == str: _new_dict = {list(result.keys())[0]:", "print( \"GndConnector: initialization has been done without connectivity check.\" )", "print( f\"GndConnector note: connections to apis have not been checked", "{ \"name\": \"culturegraph\", \"baseUrl\": \"https://hub.culturegraph.org/entityfacts/{}\", \"baseAliases\": { \"type\": [ \"@type\",", "= os.path.join(local_save_path, \"config\", \"postprocessing\", \"gnd_apilist.json\") try: self.apilist: Union[dict, None] =", "yet. to do so manually execute connectivitycheck_loop() method of the", "have not been checked yet. to do so manually execute", "[i for i, _ in enumerate(self.apilist)] if self.check_connectivity == True:", "in self.apilist) \"\"\" if self.check_connectivity == False: print( f\"GndConnector note:", "self, gnd_id: Union[str, List[str], None] = None, apiindex: int =", "show_printmessages: bool = True, ) -> None: \"\"\"establishes connection to", "gnd id list: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\" ) if self.show_printmessages else None return", "0) -> int: \"\"\"print baseUrl string of the currently selected", "\"\"\"auxiliary method of connectivitycheck_loop(), checks a single api`s (from self.apilist)", "return None self.connection_established = True if _temp_data != None and", "None] = None, apiindex: int = 0, check_connectivity: bool =", "print(\"initializing GndConnector..\") if show_printmessages else None self.show_printmessages: bool = show_printmessages", ") if self.show_printmessages else None return None self.connection_established = True", "in it, mode parameter accepts str 'base' (all base aliases", "if self.check_connectivity == False: 
self.check_connectivity == True if len(self.remaining_apis_to_check) >", "the value of those api, which is first to pass", "self.check_connectivity == True if len(self.remaining_apis_to_check) > 0: if self.connectivitycheck_single(self.remaining_apis_to_check[0]) ==", "_temp_data = {} try: filereader = FileReader( filepath=self.return_complete_url(), origin=\"web\", internal_call=True,", "values. alias definitions in self.apilist are used for this filtering", "\"str\"}], \"nominal\"], \"pseudonyms\": [ \"variantNameEntityForThePerson\", [{\"forename\": [\"str\"], \"surname\": [\"str\"]}], \"nominal\",", "lobid api the value of the base category 'type' is", "list denotates (in order of the list) 1. the original", "with selected values, which should be returned (base mode =", "\"\"\"sub method, which extracts the key-value pairs from the raw", "processed\" ) if self.show_printmessages else None self.connection_established = True #", "= base_categories if mode == \"base\" else mode selected_categories_data =", "be a \"person\", \"place\", \"organization\" or a custom string refering", "(defined in the values of this mapping dict) found in", "return_complete_url() internal error: no gnd id number has been passed", "expected.\" ) if self.show_printmessages else None return None self.connection_established =", "normalization of this data is achieved with the help of", "not create default gnd_apilist.json in config folder.\" ) if self.show_printmessages", "data from api with the possibility to filter results, a", "\"surname\": [\"str\"]}], \"nominal\", ], }, \"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\":", "print( \"GndConnector connectivity check error: none of the listed apis", "list: _new_dict = {} for key in result: _new_dict[key] =", "entities of Deutsche Nationalbibliothek´s database is retrieved, loaded data can", "or a list of str (specific aliases will be extracted)\"\"\"", "{ \"name\": \"lobid\", \"baseUrl\": \"http://lobid.org/gnd/{}\", 
\"baseAliases\": { \"type\": [ \"type\",", "self.show_printmessages else None # handling of categorical data types if", "return None def get_gnd_data(self, data_selection: Union[str, List[str], None] = None)", "of 'baseAliases' dict define the new key names, their value", "gnd id numbers as keys and filtered or unfiltered response", "using lobid api the value of the base category 'type'", "\"GndConnector connectivity check error: none of the listed apis is", ") if self.show_printmessages else None return None result = {}", "mapping information to assign a category (defined keys of this", "sub method for filtering def filter_received_data(gnd_id: str, mode: Union[str, List[str]])", "}, \"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\": {}, }, ] self.apiindex:", "information stored in self.apilist) can be \"base\" (all baseAliases data", "file with default settings...\" ) if self.show_printmessages else None self.apilist:", "# category_sets = {'base': [list(self.apilist[self.apiindex][\"baseAliases\"].keys()), 'baseAliases'], # 'custom': [list(self.apilist[self.apiindex][\"custom\"].keys()), 'custom']", "= _type selected_categories_data[category] = _temp_data return selected_categories_data # executing sub", "\"baseAliases\": { \"type\": [ \"type\", [\"str\"], \"categorial\", { \"person\": \"Person\",", "passed to an instance of Cache class for further processing", "in passed gnd id list: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\" ) if self.show_printmessages else", "__init__( self, gnd_id: Union[str, List[str], None] = None, apiindex: int", "= category_sets.get(mode)[1] if type(mode) == str else 'baseAliases' # =>", "self.apiindex: int = apiindex self.apilist_filepath: str = os.path.join(local_save_path, \"config\", \"postprocessing\",", "could be delivered by api\" ) if self.show_printmessages else None", "aliases from apilist definition. 
list mode = select specific aliases", "gnd id {index + 1} ({gnd}) of {len(self.gnd_id)} processed\" )", "\"organizationAliases\": {}, }, ] self.apiindex: int = 0 try: makedir_if_necessary(os.path.dirname(self.apilist_filepath))", "return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id) elif type(self.gnd_id) == list: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index]) else: print(", "data delivered by different apis comes in different key-value-structures; normalization", "= 0 if self.gnd_id is not None: if type(self.gnd_id) ==", ") if self.show_printmessages else None return None def get_gnd_data(self, data_selection:", "\"GndConnector connectivity error in get_gnd_data() method: could not load resource", "FileWriter(data=self.apilist, filepath=self.apilist_filepath).writefile_json() except: print( f\"GndConnector __init__(): could not create default", "current connector object. continuing attempt to receive gnd data from", "(i.e. [\"type\", \"name\"]) (not yet implemented: can be a \"person\",", "# change found categorial string to selfdefined string (i.e. 
'Person'", "is responding as expected.\" ) if self.show_printmessages else None return", "_ in enumerate(self.apilist)]: print( \"GndConnector print_complete_url() error: apiindex is not", "else: print( f\"GndConnector connectivity check: {self.apilist[self.remaining_apis_to_check[0]]['name']} api is currently not", "id {self.gnd_id} received.\" ) if self.show_printmessages else None else: print(", "this mapping dict) based on specific values (defined in the", "instance of Cache class for further processing or FileWriter class", "for i, _ in enumerate(self.apilist)]: print( \"GndConnector print_complete_url() error: apiindex", "assigned to 'person', if the raw data json object has", "_temp_data: _temp_data = _type selected_categories_data[category] = _temp_data return selected_categories_data #", "\"lobid\", \"baseUrl\": \"http://lobid.org/gnd/{}\", \"baseAliases\": { \"type\": [ \"type\", [\"str\"], \"categorial\",", "def connectivitycheck_loop(self) -> int: \"\"\"recursive connectivity check, checking every single", "return_complete_url() error: apiindex is not defined correctly. using default api...\"", "id number(s) apiindex: index of selected api in list defined", "value type (logic-wise: 'categorial' or 'nominal'), 4. a categorization dict,", "if _temp_categorial_values[_type] in _temp_data: _temp_data = _type selected_categories_data[category] = _temp_data", "self.apilist_filepath: str = os.path.join(local_save_path, \"config\", \"postprocessing\", \"gnd_apilist.json\") try: self.apilist: Union[dict,", "str else mode # selected_categories_alias = category_sets.get(mode)[1] if type(mode) ==", "[\"sameAs\", [{\"@id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"pseudonym\", [{\"preferredName\": \"str\"}], \"nominal\",", "{}, \"organizationAliases\": {}, }, { \"name\": \"lobid\", \"baseUrl\": \"http://lobid.org/gnd/{}\", \"baseAliases\":", "by index value. 
returns 0 or -1 for unittest purposes\"\"\"", "= check_connectivity self.connection_established: bool = False self.remaining_apis_to_check: list = [i", "\"GndConnector: could not find gnd_apilist.json in config dir. creating file", "elif type(self.gnd_id) == list: _new_dict = {} for key in", "True: self.connectivitycheck_loop() else: print( \"GndConnector: initialization has been done without", "original value type (python-wise: i.e. 'str' or '[str]'), 3. the", "for further processing or FileWriter class to save it gnd_id:", "\"baseUrl\": \"http://lobid.org/gnd/{}\", \"baseAliases\": { \"type\": [ \"type\", [\"str\"], \"categorial\", {", "execute connectivity check in __init__() or not (see connectivitycheck_loop()) show_printmessages:", "of type str, example 2: using lobid api the value", "= 0) -> int: \"\"\"print baseUrl string of the currently", "preset gnd_id_to_test value refers to Goethe\"\"\" try: result: dict =", "to connector object yet.\" ) if self.show_printmessages else None return", "be a list of one or more baseAliases (i.e. [\"type\",", "id {self.gnd_id} no data could be delivered by api\" )", "with selfdefined string (i.e. ['Person', 'PoliticalLeader'] to 'person') elif type(_temp_category_data_form)", "try: filereader = FileReader( filepath=self.return_complete_url(), origin=\"web\", internal_call=True, show_printmessages=False ) _temp_data", "names, their value list denotates (in order of the list)", "default settings...\" ) if self.show_printmessages else None self.apilist: List[dict] =", "creating file with default settings...\" ) if self.show_printmessages else None", "[\"sameAs\", [{\"id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"variantNameEntityForThePerson\", [{\"forename\": [\"str\"], \"surname\":", "print( \"GndConnector print_complete_url() error: apiindex is not defined correctly. using", "f\"GndConnector complete URL of gnd id number {index + 1}", "apis. 
gnd data queries can not be executed at the", "None] = FileReader( filepath=self.apilist_filepath, origin=\"local\", internal_call=True, show_printmessages=False ).loadfile_json() except FileNotFound:", "data is provided: \"type\", \"name\", \"furtherNames\", \"sameAs\", \"pseudonyms\") can be", "of Cache class for further processing or FileWriter class to", "on runtime or not apilist_filepath: path to apilist config file", "self.show_printmessages else None elif self.connection_established == False: print( \"GndConnector connectivity", "default api...\" ) if self.show_printmessages else None self.apiindex = 0", "if _temp_data != None and _temp_data != False: result[self.gnd_id] =", "enumerate(self.apilist)] if self.check_connectivity == True: self.connectivitycheck_loop() else: print( \"GndConnector: initialization", "apiindex values, which have not been checked yet in connectivitycheck_loop()\"\"\"", "keys of this mapping dict) based on specific values (defined", "initialization has been done without connectivity check.\" ) if self.show_printmessages", "example 1: using culturegraph api the value of the base", "json, preset gnd_id_to_test value refers to Goethe\"\"\" try: result: dict", "bool = check_connectivity self.connection_established: bool = False self.remaining_apis_to_check: list =", "filtering def filter_received_data(gnd_id: str, mode: Union[str, List[str]]) -> dict: \"\"\"sub", "\"variantNameEntityForThePerson\", [{\"forename\": [\"str\"], \"surname\": [\"str\"]}], \"nominal\", ], }, \"personAliases\": {},", "json object has a key 'type' with a list as", "configuration data set, delivers a mapping to be able to", "normalize data from different apis, defines api`s url and aliases", "for key in result: _new_dict[key] = filter_received_data(key, data_selection) result =", "}, ] self.apiindex: int = 0 try: makedir_if_necessary(os.path.dirname(self.apilist_filepath)) FileWriter(data=self.apilist, filepath=self.apilist_filepath).writefile_json()", "== 
False: print( f\"GndConnector note: connections to apis have not", "except: print( f\"GndConnector get_gnd_data() status: for gnd id {index +", "specific aliases from base set) # defining sub method for", "object has a key '@type' with the value 'person' of", "-> None: \"\"\"establishes connection to api, from which norm data", "None self.apiindex = 0 if self.gnd_id is not None: if", "\"base\" (all baseAliases data is provided: \"type\", \"name\", \"furtherNames\", \"sameAs\",", "or unfiltered response json data as values data_selection: if delivered,", "a list of categories to get_gnd_data() only if they are", "[ \"type\", [\"str\"], \"categorial\", { \"person\": \"Person\", \"organisation\": \"CorporateBody\", \"place\":", "type(_temp_category_data_form) == list: for _type in _temp_categorial_values: if _temp_categorial_values[_type] in", "'str' or '[str]'), 3. the original value type (logic-wise: 'categorial'", "object yet.\" ) if self.show_printmessages else None return -1 def", "is not None: if type(self.gnd_id) == str: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id) elif", "status: for gnd id {self.gnd_id} no data could be delivered", "or not apilist_filepath: path to apilist config file apilist: list", "{len(self.gnd_id)} no data could be delivered by api\" ) if", "and self.apilist[self.apiindex][\"baseAliases\"][category][2] == \"categorial\" and type(self.apilist[self.apiindex][\"baseAliases\"][category][3] == dict) ): _temp_category_data_form", "filtering purposes (see get_gnd_data()) connection_established: data from an api has", "1: using culturegraph api the value of the base category", "selected_categories_alias = category_sets.get(mode)[1] if type(mode) == str else 'baseAliases' #", "{index + 1} in passed gnd id list: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\" )", "+ 1} ({gnd}) of {len(self.gnd_id)} no data could be delivered", "responding as expected.\" ) if self.show_printmessages else None return -1", 
"], }, \"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\": {}, }, {", "else None self.apiindex = self.remaining_apis_to_check[0] self.remaining_apis_to_check = [i for i,", "are used for this filtering process: the keys of 'baseAliases'", "raw data json object has a key 'type' with a", "the possibility to filter results, a dict is created, having", "except: print( \"GndConnector connectivity error in get_gnd_data() method: could not", "1} ({gnd}) of {len(self.gnd_id)} no data could be delivered by", "False: result[self.gnd_id] = _temp_data print( f\"GndConnector get_gnd_data() status: data for", "origin=\"web\", internal_call=True, show_printmessages=False ) _temp_data = filereader.loadfile_json() except: print( \"GndConnector", "filereader.loadfile_json() except: print( \"GndConnector connectivity error in get_gnd_data() method: could", "self.show_printmessages else None return None result = {} if type(self.gnd_id)", "FileReader( filepath=self.return_complete_url(), origin=\"web\", internal_call=True, show_printmessages=False ) _temp_data = filereader.loadfile_json() except:", "for filtering def filter_received_data(gnd_id: str, mode: Union[str, List[str]]) -> dict:", "else None def connectivitycheck_single(self, index_to_test: int, gnd_id_to_test: str = \"118540238\")", "\"postprocessing\", \"gnd_apilist.json\") try: self.apilist: Union[dict, None] = FileReader( filepath=self.apilist_filepath, origin=\"local\",", "_type selected_categories_data[category] = _temp_data return selected_categories_data # executing sub method", ") if self.show_printmessages else None # handling of categorical data", "enumerate(self.apilist)]: print( \"GndConnector print_complete_url() error: apiindex is not defined correctly.", "def return_complete_url(self, index: int = 0) -> Union[str, None]: \"\"\"return", "{self.apilist[self.apiindex]['name']} api...\" ) if self.show_printmessages else None elif self.connection_established ==", "\"sameAs\", \"pseudonyms\") can be 
a list of one or more", "the available apis. gnd data queries can not be executed", "and aliases for filtering purposes (see get_gnd_data()) connection_established: data from", "data for gnd id {self.gnd_id} received.\" ) if self.show_printmessages else", "gnd id {self.gnd_id} received.\" ) if self.show_printmessages else None else:", "[\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"id\": \"str\"}], \"nominal\"], \"pseudonyms\": [", "True # filtering: build new dict with selected values, which", "in __init__() or not (see connectivitycheck_loop()) show_printmessages: show class internal", "_type in _temp_categorial_values: if _temp_categorial_values[_type] in _temp_data: _temp_data = _type", "list of categories to get_gnd_data() only if they are defined", "error: none of the listed apis is responding as expected.\"", "in enumerate(self.apilist)] if self.check_connectivity == True: self.connectivitycheck_loop() else: print( \"GndConnector:", "in result: _new_dict[key] = filter_received_data(key, data_selection) result = _new_dict return", "receive data from api with the possibility to filter results,", "None def get_gnd_data(self, data_selection: Union[str, List[str], None] = None) ->", "self.show_printmessages else None def connectivitycheck_single(self, index_to_test: int, gnd_id_to_test: str =", "type(self.gnd_id) == list: _new_dict = {} for key in result:", "mapping information stored in self.apilist) can be \"base\" (all baseAliases", "_ in enumerate(self.apilist)]: print( \"GndConnector return_complete_url() error: apiindex is not", "used for this filtering process: the keys of 'baseAliases' dict", "values (defined in the values of this mapping dict) found", ") if self.show_printmessages else None elif self.connection_established == False: print(", "could not load resource from api as expected.\" ) if", "keys of 'baseAliases' dict define the new key names, their", "> 0 and self.apilist[self.apiindex][\"baseAliases\"][category][2] == 
\"categorial\" and type(self.apilist[self.apiindex][\"baseAliases\"][category][3] == dict)", "the raw data json object has a key 'type' with", "else None self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0]) self.connectivitycheck_loop() else: print( \"GndConnector connectivity check error:", "remaining_apis_to_check: list of apiindex values, which have not been checked", "list: print( f\"GndConnector complete URL of gnd id number {index", "no data could be delivered by api\" ) if self.show_printmessages", "get_gnd_data()) connection_established: data from an api has already been received", "unittest purposes\"\"\" if self.apiindex not in [i for i, _", "== str: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id) elif type(self.gnd_id) == list: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index])", "return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index]) else: print( \"GndConnector return_complete_url() internal error: no gnd", "receive gnd data from {self.apilist[self.apiindex]['name']} api...\" ) if self.show_printmessages else", "apilist: list of dicts as configuration data set, delivers a", "mapping is provided in self.apilist) \"\"\" if self.check_connectivity == False:", "is not defined correctly. 
using default api...\" ) if self.show_printmessages", ") if self.show_printmessages else None return -1 def print_complete_url(self, index:", "-1 for unittest purposes\"\"\" if self.check_connectivity == False: self.check_connectivity ==", "_temp_data != None and _temp_data != False: result[self.gnd_id] = _temp_data", "self.show_printmessages else None return None self.connection_established = True if _temp_data", "'baseAliases' # => allow parsing a list of categories to", "self.check_connectivity == False: self.check_connectivity == True if len(self.remaining_apis_to_check) > 0:", "for this filtering process: the keys of 'baseAliases' dict define", "else None result[gnd] = _temp_data print( f\"GndConnector get_gnd_data() status: gnd", "GndConnector..\") if show_printmessages else None self.show_printmessages: bool = show_printmessages self.gnd_id:", "setting self.apiindex to the value of those api, which is", "[\"type\", \"name\"]) (not yet implemented: can be a \"person\", \"place\",", "of self.gnd_id (list or str) selected by index value\"\"\" if", "_temp_categorial_values[_type] in _temp_data: _temp_data = _type selected_categories_data[category] = _temp_data return", "'baseAliases' dict define the new key names, their value list", "note: connections to apis have not been checked yet. to", "\"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"@id\": \"str\"}], \"nominal\"],", "if mode == \"base\" else mode selected_categories_data = {} for", "else: print( f\"GndConnector get_gnd_data() status: for gnd id {self.gnd_id} no", "None]: \"\"\"method to receive data from api with the possibility", "\"categorial\", { \"person\": \"person\", \"organisation\": \"organisation\", \"place\": \"place\", }, ],", "a \"person\", \"place\", \"organization\" or a custom string refering to", "check: {self.apilist[self.remaining_apis_to_check[0]]['name']} api is currently not responding as expected. 
checking", "connector object yet.\" ) if self.show_printmessages else None return None", "successfully. returns 0 or -1 for unittest purposes\"\"\" if self.check_connectivity", "selected by index value\"\"\" if self.apiindex not in [i for", "in _temp_categorial_values: if _temp_categorial_values[_type] in _temp_data: _temp_data = _type selected_categories_data[category]", "= _temp_data print( f\"GndConnector get_gnd_data() status: data for gnd id", "achieved with the help of key-value mapping information stored in", "elif self.connection_established == False: print( \"GndConnector connectivity error: after connectivity", "if self.show_printmessages else None return None elif type(self.gnd_id) == list:", "code and checks if response data type is json, preset", "which has itself a value 'Person' of type str in", "self.connection_established = True return 0 else: print( f\"GndConnector connectivity check:", "def filter_received_data(gnd_id: str, mode: Union[str, List[str]]) -> dict: \"\"\"sub method,", "apis is responding as expected.\" ) if self.show_printmessages else None", ").loadfile_json() except FileNotFound: print( \"GndConnector: could not find gnd_apilist.json in", "else mode selected_categories_data = {} for category in selected_categories: _temp_data", "by index value\"\"\" if self.apiindex not in [i for i,", "list of apiindex values, which have not been checked yet", "selected api in list defined in self.apilist check_connectivity: execute connectivity", "show_printmessages else None self.show_printmessages: bool = show_printmessages self.gnd_id: Union[str, List[str],", "be able to normalize data from different apis, defines api`s", "False: print( \"GndConnector connectivity error: after connectivity check no connection", "by renaming keys and re-sorting data from different keys from", "else None # handling of categorical data types if (", "elif type(self.gnd_id) == list: for index, gnd in enumerate(self.gnd_id): _temp_data", "checked yet in 
connectivitycheck_loop()\"\"\" print(\"initializing GndConnector..\") if show_printmessages else None", "assign a category (defined keys of this mapping dict) based", "\"place\", }, ], \"name\": [\"preferredName\", \"str\", \"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"],", "== \"categorial\" and type(self.apilist[self.apiindex][\"baseAliases\"][category][3] == dict) ): _temp_category_data_form = self.apilist[self.apiindex][\"baseAliases\"][category][1]", "else None self.check_connectivity: bool = check_connectivity self.connection_established: bool = False", "return_complete_url(self, index: int = 0) -> Union[str, None]: \"\"\"return baseUrl", "not (see connectivitycheck_loop()) show_printmessages: show class internal printmessages on runtime", "a single api`s (from self.apilist) response status code and checks", "return True return False def connectivitycheck_loop(self) -> int: \"\"\"recursive connectivity", "data is achieved with the help of key-value mapping information", "\"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"id\": \"str\"}], \"nominal\"],", "from typing import Union, List from tei_entity_enricher.interface.postprocessing.io import FileReader, FileWriter", "alias definitions in self.apilist are used for this filtering process:", "and type(self.apilist[self.apiindex][\"baseAliases\"][category][3] == dict) ): _temp_category_data_form = self.apilist[self.apiindex][\"baseAliases\"][category][1] _temp_categorial_values =", "True if len(self.remaining_apis_to_check) > 0: if self.connectivitycheck_single(self.remaining_apis_to_check[0]) == True: print(", "# selected_categories_alias = category_sets.get(mode)[1] if type(mode) == str else 'baseAliases'", "of type str in it, mode parameter accepts str 'base'", "False: self.check_connectivity == True if len(self.remaining_apis_to_check) > 0: if self.connectivitycheck_single(self.remaining_apis_to_check[0])", "the raw data into new keys 
(purpose: json data delivered", "id {index + 1} ({gnd}) of {len(self.gnd_id)} no data could", "_temp_category_data_form = self.apilist[self.apiindex][\"baseAliases\"][category][1] _temp_categorial_values = self.apilist[self.apiindex][\"baseAliases\"][category][3] # change found categorial", "or a custom string refering to a user-defined set of", "generated by renaming keys and re-sorting data from different keys", "id numbers as keys and filtered or unfiltered response json", "\"GndConnector print_complete_url() internal error: no gnd id number has been", "status: gnd id {index + 1} ({gnd}) of {len(self.gnd_id)} processed\"", "[{\"@id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"pseudonym\", [{\"preferredName\": \"str\"}], \"nominal\", ],", "os.path.join(local_save_path, \"config\", \"postprocessing\", \"gnd_apilist.json\") try: self.apilist: Union[dict, None] = FileReader(", "FileReader( filepath=self.return_complete_url(index), origin=\"web\", internal_call=True, show_printmessages=True, ) _temp_data = filereader.loadfile_json() except:", "for i, _ in enumerate(self.apilist)]: print( \"GndConnector return_complete_url() error: apiindex", "bool = True, show_printmessages: bool = True, ) -> None:", "for unittest purposes\"\"\" if self.check_connectivity == False: self.check_connectivity == True", "type (python-wise: i.e. 'str' or '[str]'), 3. the original value", "(base mode = all base aliases from apilist definition. list", "== str: _new_dict = {list(result.keys())[0]: filter_received_data(self.gnd_id, data_selection)} elif type(self.gnd_id) ==", "== list: for index, gnd in enumerate(self.gnd_id): _temp_data = {}", "of the available apis. gnd data queries can not be", "new key names, their value list denotates (in order of", "be delivered by api\" ) if self.show_printmessages else None result[gnd]", "from apilist definition. list mode = select specific aliases from", "\"pseudonyms\") can be a list of one or more baseAliases", "been checked yet. 
to do so manually execute connectivitycheck_loop() method", "{ \"person\": \"Person\", \"organisation\": \"CorporateBody\", \"place\": \"PlaceOrGeographicName\", }, ], \"name\":", "been received or not remaining_apis_to_check: list of apiindex values, which", "id number and renames the keys and/or values. alias definitions", "mode = select specific aliases from base set) # defining", "in list defined in self.apilist check_connectivity: execute connectivity check in", "category_sets.get(mode)[1] if type(mode) == str else 'baseAliases' # => allow", "if self.show_printmessages else None def connectivitycheck_single(self, index_to_test: int, gnd_id_to_test: str", "string (i.e. ['Person', 'PoliticalLeader'] to 'person') elif type(_temp_category_data_form) == list:", "type is json, preset gnd_id_to_test value refers to Goethe\"\"\" try:", "and _temp_data != False: result[self.gnd_id] = _temp_data print( f\"GndConnector get_gnd_data()", "select specific aliases from base set) # defining sub method", "\"GndConnector return_complete_url() error: apiindex is not defined correctly. using default", "for gnd id {self.gnd_id} received.\" ) if self.show_printmessages else None", "selected_categories_list = category_sets.get(mode)[0] if type(mode) == str else mode #", "re-sorting data from different keys from the raw data into", "{self.apilist[self.remaining_apis_to_check[0]]['name']} api is currently not responding as expected. checking for", "list of one or more baseAliases (i.e. 
[\"type\", \"name\"]) (not", "-> int: \"\"\"recursive connectivity check, checking every single api in", "passed, connection to {self.apilist[self.remaining_apis_to_check[0]]['name']} api established.\" ) if self.show_printmessages else", "\"type\": [ \"@type\", \"str\", \"categorial\", { \"person\": \"person\", \"organisation\": \"organisation\",", "__init__(): could not create default gnd_apilist.json in config folder.\" )", "categorical data types if ( len(_temp_data) > 0 and self.apilist[self.apiindex][\"baseAliases\"][category][2]", "handle additional alias definition sets in gnd_apilist.json by user #", "established to any of the available apis. gnd data queries", "config file apilist: list of dicts as configuration data set,", "all base aliases from apilist definition. list mode = select", "self.show_printmessages else None return None def get_gnd_data(self, data_selection: Union[str, List[str],", "using culturegraph api the value of the base category 'type'", "\"placeAliases\": {}, \"organizationAliases\": {}, }, { \"name\": \"lobid\", \"baseUrl\": \"http://lobid.org/gnd/{}\",", "delivered by api\" ) if self.show_printmessages else None result[gnd] =", "= gnd_id self.apiindex: int = apiindex self.apilist_filepath: str = os.path.join(local_save_path,", "connectivity check error: none of the listed apis is responding", "apiindex: index of selected api in list defined in self.apilist", "else None self.show_printmessages: bool = show_printmessages self.gnd_id: Union[str, List[str], None]", "{}, }, { \"name\": \"lobid\", \"baseUrl\": \"http://lobid.org/gnd/{}\", \"baseAliases\": { \"type\":", "get_gnd_data() status: gnd id {index + 1} ({gnd}) of {len(self.gnd_id)}", "connectivity check, checking every single api in self.apilist (ascending) and", "type(self.gnd_id) == str: _new_dict = {list(result.keys())[0]: filter_received_data(self.gnd_id, data_selection)} elif type(self.gnd_id)", "an instance of Cache class for further processing or FileWriter", "if 
len(self.remaining_apis_to_check) > 0: if self.connectivitycheck_single(self.remaining_apis_to_check[0]) == True: print( f\"GndConnector:", "[\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"@id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"pseudonym\",", "connection to {self.apilist[self.remaining_apis_to_check[0]]['name']} api established.\" ) if self.show_printmessages else None", "print( f\"GndConnector get_gnd_data() status: for gnd id {index + 1}", "for which the mapping is provided in self.apilist) \"\"\" if", "print( \"GndConnector return_complete_url() internal error: no gnd id number has", "\"organisation\": \"organisation\", \"place\": \"place\", }, ], \"name\": [\"preferredName\", \"str\", \"nominal\"],", "True, show_printmessages: bool = True, ) -> None: \"\"\"establishes connection", "or not remaining_apis_to_check: list of apiindex values, which have not", "3. the original value type (logic-wise: 'categorial' or 'nominal'), 4.", "complete URL of gnd id number {index + 1} in", "connection could has been established to any of the available", "\"nominal\"], \"sameAs\": [\"sameAs\", [{\"@id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"pseudonym\", [{\"preferredName\":", "else: print( \"GndConnector return_complete_url() internal error: no gnd id number", "if type(result) == dict: return True return False def connectivitycheck_loop(self)", "makedir_if_necessary from tei_entity_enricher.util.exceptions import FileNotFound class GndConnector: def __init__( self,", "of apiindex values, which have not been checked yet in", "__init__() or not (see connectivitycheck_loop()) show_printmessages: show class internal printmessages", "{'base': [list(self.apilist[self.apiindex][\"baseAliases\"].keys()), 'baseAliases'], # 'custom': [list(self.apilist[self.apiindex][\"custom\"].keys()), 'custom'] # } #", "by api\" ) if self.show_printmessages else None return None elif", "processing or FileWriter class to save it gnd_id: gnd id", "values, which have not been 
checked yet in connectivitycheck_loop()\"\"\" print(\"initializing", "= {} try: filereader = FileReader( filepath=self.return_complete_url(index), origin=\"web\", internal_call=True, show_printmessages=True,", "of key-value mapping information stored in self.apilist) can be \"base\"", "if self.check_connectivity == False: print( f\"GndConnector note: connections to apis", "of this mapping dict) based on specific values (defined in", "which extracts the key-value pairs from the raw data received", "define the new key names, their value list denotates (in", "of one or more baseAliases (i.e. [\"type\", \"name\"]) (not yet", "has a key 'type' with a list as a value,", "0 try: makedir_if_necessary(os.path.dirname(self.apilist_filepath)) FileWriter(data=self.apilist, filepath=self.apilist_filepath).writefile_json() except: print( f\"GndConnector __init__(): could", "else None self.apiindex = 0 if self.gnd_id is not None:", "print( f\"GndConnector get_gnd_data() status: data for gnd id {self.gnd_id} received.\"", "allow parsing a list of categories to get_gnd_data() only if", "(all base aliases will be extracted) or a list of", "data, example 1: using culturegraph api the value of the", "it, mode parameter accepts str 'base' (all base aliases will", "in self.apilist, formatted with a gnd id number of self.gnd_id", "internal_call=True, show_printmessages=self.show_printmessages, ).loadfile_json() except: return False if type(result) == dict:", "\"type\": [ \"type\", [\"str\"], \"categorial\", { \"person\": \"Person\", \"organisation\": \"CorporateBody\",", "): _temp_category_data_form = self.apilist[self.apiindex][\"baseAliases\"][category][1] _temp_categorial_values = self.apilist[self.apiindex][\"baseAliases\"][category][3] # change found", "= False self.remaining_apis_to_check: list = [i for i, _ in", "try: result: dict = FileReader( filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test), origin=\"web\", internal_call=True, 
show_printmessages=self.show_printmessages, ).loadfile_json()", "mapping dict) found in raw data, example 1: using culturegraph", "single api`s (from self.apilist) response status code and checks if", "[ \"variantNameEntityForThePerson\", [{\"forename\": [\"str\"], \"surname\": [\"str\"]}], \"nominal\", ], }, \"personAliases\":", "Union[dict, None]: \"\"\"method to receive data from api with the", "\"organisation\": \"CorporateBody\", \"place\": \"PlaceOrGeographicName\", }, ], \"name\": [\"preferredName\", \"str\", \"nominal\"],", "if self.show_printmessages else None elif type(self.gnd_id) == list: print( f\"GndConnector", "check, checking every single api in self.apilist (ascending) and setting", "# handling of categorical data types if ( len(_temp_data) >", "str: for _type in _temp_categorial_values: if _temp_data == _temp_categorial_values[_type]: _temp_data", "different keys from the raw data into new keys (purpose:", "origin=\"web\", internal_call=True, show_printmessages=self.show_printmessages, ).loadfile_json() except: return False if type(result) ==", "method of connectivitycheck_loop(), checks a single api`s (from self.apilist) response", "'categorial' or 'nominal'), 4. 
a categorization dict, if the original", "if ( len(_temp_data) > 0 and self.apilist[self.apiindex][\"baseAliases\"][category][2] == \"categorial\" and", "if self.show_printmessages else None return None def get_gnd_data(self, data_selection: Union[str,", "mode # selected_categories_alias = category_sets.get(mode)[1] if type(mode) == str else", "f\"GndConnector get_gnd_data() status: data for gnd id {self.gnd_id} received.\" )", "_new_dict = {} for key in result: _new_dict[key] = filter_received_data(key,", "== True else None self.check_connectivity: bool = check_connectivity self.connection_established: bool", "None: if type(self.gnd_id) == str: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id) elif type(self.gnd_id) ==", "True if _temp_data != None and _temp_data != False: result[self.gnd_id]", "process: the keys of 'baseAliases' dict define the new key", "filereader = FileReader( filepath=self.return_complete_url(index), origin=\"web\", internal_call=True, show_printmessages=True, ) _temp_data =", "= _temp_data print( f\"GndConnector get_gnd_data() status: gnd id {index +", "set, delivers a mapping to be able to normalize data", "{list(result.keys())[0]: filter_received_data(self.gnd_id, data_selection)} elif type(self.gnd_id) == list: _new_dict = {}", "information for {gnd_id} in raw data. continuing processing...\" ) if", "defined in self.apilist, formatted with a gnd id number of", "URL of gnd id number {index + 1} in passed", "f\"GndConnector note: connections to apis have not been checked yet.", "normalized output is generated by renaming keys and re-sorting data", "object. 
continuing attempt to receive gnd data from {self.apilist[self.apiindex]['name']} api...\"", "tei_entity_enricher.util.exceptions import FileNotFound class GndConnector: def __init__( self, gnd_id: Union[str,", "create default gnd_apilist.json in config folder.\" ) if self.show_printmessages ==", "yet implemented: can be a \"person\", \"place\", \"organization\" or a", "# defining sub method for filtering def filter_received_data(gnd_id: str, mode:", "created, having gnd id numbers as keys and filtered or", "get_gnd_data(self, data_selection: Union[str, List[str], None] = None) -> Union[dict, None]:", "print( f\"GndConnector get_gnd_data() status: gnd id {index + 1} ({gnd})", "{index + 1} ({gnd}) of {len(self.gnd_id)} processed\" ) if self.show_printmessages", "self.show_printmessages else None self.apilist: List[dict] = [ { \"name\": \"culturegraph\",", "list: for _type in _temp_categorial_values: if _temp_categorial_values[_type] in _temp_data: _temp_data", "output is generated by renaming keys and re-sorting data from", "only if they are defined in baseAlias set? base_categories =", "self.connection_established == False: print( \"GndConnector connectivity error: after connectivity check", "str 'base' (all base aliases will be extracted) or a", "None self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0]) self.connectivitycheck_loop() else: print( \"GndConnector connectivity check error: none", "of str (specific aliases will be extracted)\"\"\" # todo: handle", "to pass the check successfully. returns 0 or -1 for", "'[str]'), 3. 
the original value type (logic-wise: 'categorial' or 'nominal'),", "dict with selected values, which should be returned (base mode", "filepath=self.apilist_filepath).writefile_json() except: print( f\"GndConnector __init__(): could not create default gnd_apilist.json", "[] try: _temp_data = result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]] except KeyError: _temp_data = []", "processing...\" ) if self.show_printmessages else None # handling of categorical", "or -1 for unittest purposes\"\"\" if self.check_connectivity == False: self.check_connectivity", ") if self.show_printmessages else None self.apilist: List[dict] = [ {", "if self.show_printmessages else None self.apilist: List[dict] = [ { \"name\":", "keys and filtered or unfiltered response json data as values", "False if type(result) == dict: return True return False def", "bool: \"\"\"auxiliary method of connectivitycheck_loop(), checks a single api`s (from", "self.remaining_apis_to_check: list = [i for i, _ in enumerate(self.apilist)] if", "show_printmessages=False ) _temp_data = filereader.loadfile_json() except: print( \"GndConnector connectivity error", "for gnd id {self.gnd_id} no data could be delivered by", "keys and/or values. alias definitions in self.apilist are used for", "False def connectivitycheck_loop(self) -> int: \"\"\"recursive connectivity check, checking every", "= {} for key in result: _new_dict[key] = filter_received_data(key, data_selection)", "number and renames the keys and/or values. alias definitions in", "bool = False self.remaining_apis_to_check: list = [i for i, _", "with a gnd id number of self.gnd_id (list or str)", "\"organization\" or a custom string refering to a user-defined set", "\"\"\"recursive connectivity check, checking every single api in self.apilist (ascending)", "from api for one gnd id number and renames the", "yet.\" ) if self.show_printmessages else None return -1 def return_complete_url(self,", "the original key name, 2. 
the original value type (python-wise:", "it delivers mapping information to assign a category (defined keys", "method of the current connector object. continuing attempt to receive", "found categorial list with selfdefined string (i.e. ['Person', 'PoliticalLeader'] to", "None def connectivitycheck_single(self, index_to_test: int, gnd_id_to_test: str = \"118540238\") ->", "api in self.apilist (ascending) and setting self.apiindex to the value", "= FileReader( filepath=self.return_complete_url(index), origin=\"web\", internal_call=True, show_printmessages=True, ) _temp_data = filereader.loadfile_json()", "== True if len(self.remaining_apis_to_check) > 0: if self.connectivitycheck_single(self.remaining_apis_to_check[0]) == True:", "FileReader( filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test), origin=\"web\", internal_call=True, show_printmessages=self.show_printmessages, ).loadfile_json() except: return False if", "gnd id number has been passed to connector object yet.\"", "[\"preferredName\", \"str\", \"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"id\":", "passed gnd id list: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\" ) if self.show_printmessages else None", "index, gnd in enumerate(self.gnd_id): _temp_data = {} try: filereader =", "None self.check_connectivity: bool = check_connectivity self.connection_established: bool = False self.remaining_apis_to_check:", "parameter accepts str 'base' (all base aliases will be extracted)", "category_sets.get(mode)[0] if type(mode) == str else mode # selected_categories_alias =", "if self.show_printmessages else None return -1 def return_complete_url(self, index: int", "connector object. 
continuing attempt to receive gnd data from {self.apilist[self.apiindex]['name']}", "different key-value-structures; normalization of this data is achieved with the", "for filtering if data_selection is not None: if type(self.gnd_id) ==", "None] = None) -> Union[dict, None]: \"\"\"method to receive data", "elif type(self.gnd_id) == list: print( f\"GndConnector complete URL of gnd", "selected_categories: _temp_data = [] try: _temp_data = result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]] except KeyError:", "could not find gnd_apilist.json in config dir. creating file with", "string to selfdefined string (i.e. 'Person' to 'person') if type(_temp_category_data_form)", "show_printmessages=True, ) _temp_data = filereader.loadfile_json() except: print( f\"GndConnector get_gnd_data() status:", "delivered by api\" ) if self.show_printmessages else None return None", "api as expected.\" ) if self.show_printmessages else None return None", "= True if _temp_data != None and _temp_data != False:", "defined in baseAlias set? base_categories = list(self.apilist[self.apiindex][\"baseAliases\"].keys()) selected_categories = base_categories", "keys (purpose: json data delivered by different apis comes in", "is provided: \"type\", \"name\", \"furtherNames\", \"sameAs\", \"pseudonyms\") can be a", "to any of the available apis. 
gnd data queries can", "connection_established: data from an api has already been received or", "in selected_categories: _temp_data = [] try: _temp_data = result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]] except", "help of key-value mapping information stored in self.apilist) can be", "_temp_data == _temp_categorial_values[_type]: _temp_data = _type # replace found categorial", "_ in enumerate(self.apilist)] self.connection_established = True return 0 else: print(", "if self.show_printmessages else None self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0]) self.connectivitycheck_loop() else: print( \"GndConnector connectivity", "-> Union[str, None]: \"\"\"return baseUrl string of the currently selected", "if show_printmessages else None self.show_printmessages: bool = show_printmessages self.gnd_id: Union[str,", "connectivity check passed, connection to {self.apilist[self.remaining_apis_to_check[0]]['name']} api established.\" ) if", "URL: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\" ) if self.show_printmessages else None elif type(self.gnd_id) ==", "execute connectivitycheck_loop() method of the current connector object. continuing attempt", "type str in it, mode parameter accepts str 'base' (all", "False: print( f\"GndConnector note: connections to apis have not been", "'custom'] # } # selected_categories_list = category_sets.get(mode)[0] if type(mode) ==", "find gnd_apilist.json in config dir. 
creating file with default settings...\"", "further processing or FileWriter class to save it gnd_id: gnd", "_temp_categorial_values[_type]: _temp_data = _type # replace found categorial list with", "(not yet implemented: can be a \"person\", \"place\", \"organization\" or", "None: \"\"\"establishes connection to api, from which norm data for", "definitions in self.apilist are used for this filtering process: the", "print( \"GndConnector print_complete_url() internal error: no gnd id number has", "[i for i, _ in enumerate(self.apilist)] self.connection_established = True return", "else None return None result = {} if type(self.gnd_id) ==", "\"place\": \"place\", }, ], \"name\": [\"preferredName\", \"str\", \"nominal\"], \"furtherNames\": [\"variantName\",", "= select specific aliases from base set) # defining sub", "base category 'type' is assigned to 'person', if the raw", "and checks if response data type is json, preset gnd_id_to_test", "if self.gnd_id is not None: if type(self.gnd_id) == str: print(", "gnd in enumerate(self.gnd_id): _temp_data = {} try: filereader = FileReader(", "type(self.gnd_id) == str: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id) elif type(self.gnd_id) == list: return", "if they are defined in baseAlias set? 
base_categories = list(self.apilist[self.apiindex][\"baseAliases\"].keys())", "original value type logic-wise is 'categorial': it delivers mapping information", "self.check_connectivity == True: self.connectivitycheck_loop() else: print( \"GndConnector: initialization has been", "gnd_apilist.json in config folder.\" ) if self.show_printmessages == True else", "None return None def get_gnd_data(self, data_selection: Union[str, List[str], None] =", "\"nominal\"], \"sameAs\": [\"sameAs\", [{\"id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"variantNameEntityForThePerson\", [{\"forename\":", "returns 0 or -1 for unittest purposes\"\"\" if self.apiindex not", "number(s) apiindex: index of selected api in list defined in", "[list(self.apilist[self.apiindex][\"custom\"].keys()), 'custom'] # } # selected_categories_list = category_sets.get(mode)[0] if type(mode)", "\"baseAliases\": { \"type\": [ \"@type\", \"str\", \"categorial\", { \"person\": \"person\",", "(ascending) and setting self.apiindex to the value of those api,", "f\"GndConnector get_gnd_data() filtering note: could not find {category} information for", "value 'person' of type str, example 2: using lobid api", "else None else: print( f\"GndConnector get_gnd_data() status: for gnd id", "data could be delivered by api\" ) if self.show_printmessages else", "(from self.apilist) response status code and checks if response data", "renaming keys and re-sorting data from different keys from the", "if type(mode) == str else 'baseAliases' # => allow parsing", "# } # selected_categories_list = category_sets.get(mode)[0] if type(mode) == str", "category 'type' is assigned to 'person', if the raw data", "if type(self.gnd_id) == str: _temp_data = {} try: filereader =", "dir. 
creating file with default settings...\" ) if self.show_printmessages else", "self.connectivitycheck_single(self.remaining_apis_to_check[0]) == True: print( f\"GndConnector: connectivity check passed, connection to", "having gnd id numbers as keys and filtered or unfiltered", "extracted)\"\"\" # todo: handle additional alias definition sets in gnd_apilist.json", "key 'type' with a list as a value, which has", "= [] try: _temp_data = result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]] except KeyError: _temp_data =", "of the base category 'type' is assigned to 'person', if", "responding as expected. checking for alternatives...\" ) if self.show_printmessages else", "filter_received_data(self.gnd_id, data_selection)} elif type(self.gnd_id) == list: _new_dict = {} for", "is generated by renaming keys and re-sorting data from different", "def get_gnd_data(self, data_selection: Union[str, List[str], None] = None) -> Union[dict,", "self.apiindex: int = 0 try: makedir_if_necessary(os.path.dirname(self.apilist_filepath)) FileWriter(data=self.apilist, filepath=self.apilist_filepath).writefile_json() except: print(", "name, 2. the original value type (python-wise: i.e. 
'str' or", "None]: \"\"\"return baseUrl string of the currently selected api defined", "filtering note: could not find {category} information for {gnd_id} in", "numbers as keys and filtered or unfiltered response json data", "the value of the base category 'type' is assigned to", "_temp_data = filereader.loadfile_json() except: print( f\"GndConnector get_gnd_data() status: for gnd", "key names, their value list denotates (in order of the", "data from {self.apilist[self.apiindex]['name']} api...\" ) if self.show_printmessages else None elif", "'type' with a list as a value, which has itself", "\"str\", \"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"@id\": \"str\"}],", "if self.show_printmessages == True else None self.check_connectivity: bool = check_connectivity", "\"organisation\", \"place\": \"place\", }, ], \"name\": [\"preferredName\", \"str\", \"nominal\"], \"furtherNames\":", "return None result = {} if type(self.gnd_id) == str: _temp_data", "if _temp_data == _temp_categorial_values[_type]: _temp_data = _type # replace found", "== dict) ): _temp_category_data_form = self.apilist[self.apiindex][\"baseAliases\"][category][1] _temp_categorial_values = self.apilist[self.apiindex][\"baseAliases\"][category][3] #", "results, a dict is created, having gnd id numbers as", "gnd_id: gnd id number(s) apiindex: index of selected api in", "for filtering purposes (see get_gnd_data()) connection_established: data from an api", "List[dict] = [ { \"name\": \"culturegraph\", \"baseUrl\": \"https://hub.culturegraph.org/entityfacts/{}\", \"baseAliases\": {", "the check successfully. 
returns 0 or -1 for unittest purposes\"\"\"", "_temp_data = _type selected_categories_data[category] = _temp_data return selected_categories_data # executing", "print( f\"GndConnector complete URL of gnd id number {index +", "None return -1 def print_complete_url(self, index: int = 0) ->", "result[self.gnd_id] = _temp_data print( f\"GndConnector get_gnd_data() status: data for gnd", "show_printmessages self.gnd_id: Union[str, List[str], None] = gnd_id self.apiindex: int =", "is currently not responding as expected. checking for alternatives...\" )", "currently selected api defined in self.apilist, formatted with a gnd", "be executed at the moment.\" ) if self.show_printmessages else None", "executed at the moment.\" ) if self.show_printmessages else None return", "the moment.\" ) if self.show_printmessages else None return None result", "None] = gnd_id self.apiindex: int = apiindex self.apilist_filepath: str =", "set? base_categories = list(self.apilist[self.apiindex][\"baseAliases\"].keys()) selected_categories = base_categories if mode ==", "of the current connector object. continuing attempt to receive gnd", "in different key-value-structures; normalization of this data is achieved with", "values, which should be returned (base mode = all base", "gnd id number of self.gnd_id (list or str) selected by", "to assign a category (defined keys of this mapping dict)", "self.apilist) can be \"base\" (all baseAliases data is provided: \"type\",", "os from typing import Union, List from tei_entity_enricher.interface.postprocessing.io import FileReader,", "if self.show_printmessages else None self.apiindex = self.remaining_apis_to_check[0] self.remaining_apis_to_check = [i", "filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test), origin=\"web\", internal_call=True, show_printmessages=self.show_printmessages, ).loadfile_json() except: return False if type(result)", "categorial string to selfdefined string (i.e. 
'Person' to 'person') if", ") if self.show_printmessages else None elif type(self.gnd_id) == list: print(", "1. the original key name, 2. the original value type", "comes in different key-value-structures; normalization of this data is achieved", "self.apilist check_connectivity: execute connectivity check in __init__() or not (see", "else 'baseAliases' # => allow parsing a list of categories", "in _temp_data: _temp_data = _type selected_categories_data[category] = _temp_data return selected_categories_data", "of {len(self.gnd_id)} processed\" ) if self.show_printmessages else None self.connection_established =", "or str) selected by index value\"\"\" if self.apiindex not in", "sets in gnd_apilist.json by user # category_sets = {'base': [list(self.apilist[self.apiindex][\"baseAliases\"].keys()),", "be passed to an instance of Cache class for further", "if type(self.gnd_id) == str: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id) elif type(self.gnd_id) == list:", "str, example 2: using lobid api the value of the", "self.remaining_apis_to_check = [i for i, _ in enumerate(self.apilist)] self.connection_established =", "\"person\", \"organisation\": \"organisation\", \"place\": \"place\", }, ], \"name\": [\"preferredName\", \"str\",", "selected api defined in self.apilist, formatted with a gnd id", "url and aliases for filtering purposes (see get_gnd_data()) connection_established: data", "1} in passed gnd id list: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\" ) if self.show_printmessages", "in self.apilist (ascending) and setting self.apiindex to the value of", "not None: if type(self.gnd_id) == str: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id) elif type(self.gnd_id)", "!= None and _temp_data != False: result[self.gnd_id] = _temp_data print(", "data for entities of Deutsche Nationalbibliothek´s database is retrieved, loaded", "else None self.apilist: List[dict] = [ { \"name\": \"culturegraph\", 
\"baseUrl\":", "str = \"118540238\") -> bool: \"\"\"auxiliary method of connectivitycheck_loop(), checks", "else None self.connection_established = True # filtering: build new dict", "gnd_id self.apiindex: int = apiindex self.apilist_filepath: str = os.path.join(local_save_path, \"config\",", "baseAliases data is provided: \"type\", \"name\", \"furtherNames\", \"sameAs\", \"pseudonyms\") can", "id number {index + 1} in passed gnd id list:", "connectivitycheck_loop()\"\"\" print(\"initializing GndConnector..\") if show_printmessages else None self.show_printmessages: bool =", "internal error: no gnd id number has been passed to", "check no connection could has been established to any of", "value list denotates (in order of the list) 1. the", "except FileNotFound: print( \"GndConnector: could not find gnd_apilist.json in config", "(in order of the list) 1. the original key name,", "\"\"\"method to receive data from api with the possibility to", "in connectivitycheck_loop()\"\"\" print(\"initializing GndConnector..\") if show_printmessages else None self.show_printmessages: bool", "checks a single api`s (from self.apilist) response status code and", "\"person\", \"place\", \"organization\" or a custom string refering to a", "i, _ in enumerate(self.apilist)]: print( \"GndConnector print_complete_url() error: apiindex is", "if self.show_printmessages else None self.apiindex = 0 if self.gnd_id is", "_temp_categorial_values: if _temp_data == _temp_categorial_values[_type]: _temp_data = _type # replace", "available apis. gnd data queries can not be executed at", "int = 0) -> int: \"\"\"print baseUrl string of the", "as configuration data set, delivers a mapping to be able", "with a list as a value, which has itself a", "print_complete_url() internal error: no gnd id number has been passed", "selected_categories_data[category] = _temp_data return selected_categories_data # executing sub method for", "in raw data. 
continuing processing...\" ) if self.show_printmessages else None", "id {index + 1} ({gnd}) of {len(self.gnd_id)} processed\" ) if", "as expected. checking for alternatives...\" ) if self.show_printmessages else None", "'categorial': it delivers mapping information to assign a category (defined", ") if self.show_printmessages else None def connectivitycheck_single(self, index_to_test: int, gnd_id_to_test:", "[i for i, _ in enumerate(self.apilist)]: print( \"GndConnector return_complete_url() error:", "type logic-wise is 'categorial': it delivers mapping information to assign", "= \"118540238\") -> bool: \"\"\"auxiliary method of connectivitycheck_loop(), checks a", "specific values (defined in the values of this mapping dict)", "get_gnd_data() method: could not load resource from api as expected.\"", "data_selection: Union[str, List[str], None] = None) -> Union[dict, None]: \"\"\"method", "\"str\", \"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\", [{\"id\": \"str\"}],", "data as values data_selection: if delivered, a normalized output is", "show_printmessages=False ).loadfile_json() except FileNotFound: print( \"GndConnector: could not find gnd_apilist.json", "based on specific values (defined in the values of this", "apis, defines api`s url and aliases for filtering purposes (see", "from tei_entity_enricher.util.exceptions import FileNotFound class GndConnector: def __init__( self, gnd_id:", "'Person' of type str in it, mode parameter accepts str", "data into new keys (purpose: json data delivered by different", "Union[str, List[str], None] = gnd_id self.apiindex: int = apiindex self.apilist_filepath:", "if self.gnd_id is not None: if type(self.gnd_id) == str: return", "which should be returned (base mode = all base aliases", "or more baseAliases (i.e. 
[\"type\", \"name\"]) (not yet implemented: can", "\"http://lobid.org/gnd/{}\", \"baseAliases\": { \"type\": [ \"type\", [\"str\"], \"categorial\", { \"person\":", "index value\"\"\" if self.apiindex not in [i for i, _", "method: could not load resource from api as expected.\" )", "result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]] except KeyError: _temp_data = [] print( f\"GndConnector get_gnd_data() filtering", "# 'custom': [list(self.apilist[self.apiindex][\"custom\"].keys()), 'custom'] # } # selected_categories_list = category_sets.get(mode)[0]", "] self.apiindex: int = 0 try: makedir_if_necessary(os.path.dirname(self.apilist_filepath)) FileWriter(data=self.apilist, filepath=self.apilist_filepath).writefile_json() except:", "a list of one or more baseAliases (i.e. [\"type\", \"name\"])", "keys from the raw data into new keys (purpose: json", "result: _new_dict[key] = filter_received_data(key, data_selection) result = _new_dict return result", "check_connectivity self.connection_established: bool = False self.remaining_apis_to_check: list = [i for", "load resource from api as expected.\" ) if self.show_printmessages else", "connectivity check no connection could has been established to any", ") if self.show_printmessages else None return None elif type(self.gnd_id) ==", "type(result) == dict: return True return False def connectivitycheck_loop(self) ->", "this data is achieved with the help of key-value mapping", "number {index + 1} in passed gnd id list: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\"", "self.apilist) \"\"\" if self.check_connectivity == False: print( f\"GndConnector note: connections", "= [i for i, _ in enumerate(self.apilist)] self.connection_established = True", "makedir_if_necessary(os.path.dirname(self.apilist_filepath)) FileWriter(data=self.apilist, filepath=self.apilist_filepath).writefile_json() except: print( f\"GndConnector __init__(): could not create", "selected_categories_data # 
executing sub method for filtering if data_selection is", "filepath=self.return_complete_url(), origin=\"web\", internal_call=True, show_printmessages=False ) _temp_data = filereader.loadfile_json() except: print(", "\"pseudonyms\": [ \"variantNameEntityForThePerson\", [{\"forename\": [\"str\"], \"surname\": [\"str\"]}], \"nominal\", ], },", "return 0 else: print( \"GndConnector print_complete_url() internal error: no gnd", "for one gnd id number and renames the keys and/or", "by different apis comes in different key-value-structures; normalization of this", "alias definition sets in gnd_apilist.json by user # category_sets =", "gnd_apilist.json by user # category_sets = {'base': [list(self.apilist[self.apiindex][\"baseAliases\"].keys()), 'baseAliases'], #", "to {self.apilist[self.remaining_apis_to_check[0]]['name']} api established.\" ) if self.show_printmessages else None self.apiindex", "2. the original value type (python-wise: i.e. 'str' or '[str]'),", "Union[str, List[str]]) -> dict: \"\"\"sub method, which extracts the key-value", "== True: print( f\"GndConnector: connectivity check passed, connection to {self.apilist[self.remaining_apis_to_check[0]]['name']}", "{} try: filereader = FileReader( filepath=self.return_complete_url(index), origin=\"web\", internal_call=True, show_printmessages=True, )", "= self.apilist[self.apiindex][\"baseAliases\"][category][3] # change found categorial string to selfdefined string", "({gnd}) of {len(self.gnd_id)} no data could be delivered by api\"", "of Deutsche Nationalbibliothek´s database is retrieved, loaded data can be", "yet.\" ) if self.show_printmessages else None return None def get_gnd_data(self,", "[{\"forename\": [\"str\"], \"surname\": [\"str\"]}], \"nominal\", ], }, \"personAliases\": {}, \"placeAliases\":", "elif type(self.gnd_id) == list: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index]) else: print( \"GndConnector return_complete_url()", "class to save it gnd_id: gnd id number(s) 
apiindex: index", "else None return 0 else: print( \"GndConnector print_complete_url() internal error:", "= FileReader( filepath=self.return_complete_url(), origin=\"web\", internal_call=True, show_printmessages=False ) _temp_data = filereader.loadfile_json()", "connectivitycheck_loop(self) -> int: \"\"\"recursive connectivity check, checking every single api", "raw data, example 1: using culturegraph api the value of", "filtering: build new dict with selected values, which should be", "list defined in self.apilist check_connectivity: execute connectivity check in __init__()", "by api\" ) if self.show_printmessages else None result[gnd] = _temp_data", "self.connectivitycheck_loop() else: print( \"GndConnector connectivity check error: none of the", "the raw data json object has a key '@type' with", "import FileNotFound class GndConnector: def __init__( self, gnd_id: Union[str, List[str],", "(see get_gnd_data()) connection_established: data from an api has already been", "'type' is assigned to 'person', if the raw data json", "those api, which is first to pass the check successfully.", "class internal printmessages on runtime or not apilist_filepath: path to", "not responding as expected. checking for alternatives...\" ) if self.show_printmessages", "gnd id {self.gnd_id} no data could be delivered by api\"", "able to normalize data from different apis, defines api`s url", "handling of categorical data types if ( len(_temp_data) > 0", "self.apilist, formatted with a gnd id number of self.gnd_id (list", "a list as a value, which has itself a value", "= self.remaining_apis_to_check[0] self.remaining_apis_to_check = [i for i, _ in enumerate(self.apilist)]", "connectivity error in get_gnd_data() method: could not load resource from", "or str) selected by index value. 
returns 0 or -1", "yet in connectivitycheck_loop()\"\"\" print(\"initializing GndConnector..\") if show_printmessages else None self.show_printmessages:", "in enumerate(self.apilist)]: print( \"GndConnector return_complete_url() error: apiindex is not defined", "enumerate(self.apilist)]: print( \"GndConnector return_complete_url() error: apiindex is not defined correctly.", "complete URL: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\" ) if self.show_printmessages else None elif type(self.gnd_id)", ") _temp_data = filereader.loadfile_json() except: print( \"GndConnector connectivity error in", "api the value of the base category 'type' is assigned", "passed to connector object yet.\" ) if self.show_printmessages else None", "\"placeAliases\": {}, \"organizationAliases\": {}, }, ] self.apiindex: int = 0", "dict: \"\"\"sub method, which extracts the key-value pairs from the", "def print_complete_url(self, index: int = 0) -> int: \"\"\"print baseUrl", "_new_dict = {list(result.keys())[0]: filter_received_data(self.gnd_id, data_selection)} elif type(self.gnd_id) == list: _new_dict", "\"name\", \"furtherNames\", \"sameAs\", \"pseudonyms\") can be a list of one", "get_gnd_data() status: for gnd id {self.gnd_id} no data could be", "types if ( len(_temp_data) > 0 and self.apilist[self.apiindex][\"baseAliases\"][category][2] == \"categorial\"", "(see connectivitycheck_loop()) show_printmessages: show class internal printmessages on runtime or", "api in list defined in self.apilist check_connectivity: execute connectivity check", "0) -> Union[str, None]: \"\"\"return baseUrl string of the currently", "FileReader, FileWriter from tei_entity_enricher.util.helper import local_save_path, makedir_if_necessary from tei_entity_enricher.util.exceptions import", "retrieved, loaded data can be passed to an instance of", "delivered by different apis comes in different key-value-structures; normalization of", "None self.apiindex = self.remaining_apis_to_check[0] 
self.remaining_apis_to_check = [i for i, _", "None elif self.connection_established == False: print( \"GndConnector connectivity error: after", "response status code and checks if response data type is", "are defined in baseAlias set? base_categories = list(self.apilist[self.apiindex][\"baseAliases\"].keys()) selected_categories =", "dict: return True return False def connectivitycheck_loop(self) -> int: \"\"\"recursive", "value. returns 0 or -1 for unittest purposes\"\"\" if self.apiindex", "not load resource from api as expected.\" ) if self.show_printmessages", "\"organizationAliases\": {}, }, { \"name\": \"lobid\", \"baseUrl\": \"http://lobid.org/gnd/{}\", \"baseAliases\": {", "{}, \"organizationAliases\": {}, }, ] self.apiindex: int = 0 try:", "raw data. continuing processing...\" ) if self.show_printmessages else None #", "int: \"\"\"recursive connectivity check, checking every single api in self.apilist", "\"\"\" if self.check_connectivity == False: print( f\"GndConnector note: connections to", "== list: print( f\"GndConnector complete URL of gnd id number", "( len(_temp_data) > 0 and self.apilist[self.apiindex][\"baseAliases\"][category][2] == \"categorial\" and type(self.apilist[self.apiindex][\"baseAliases\"][category][3]", "f\"GndConnector __init__(): could not create default gnd_apilist.json in config folder.\"", "show_printmessages: show class internal printmessages on runtime or not apilist_filepath:", "if response data type is json, preset gnd_id_to_test value refers", "data received from api for one gnd id number and", "\"config\", \"postprocessing\", \"gnd_apilist.json\") try: self.apilist: Union[dict, None] = FileReader( filepath=self.apilist_filepath,", "value of those api, which is first to pass the", "except: print( f\"GndConnector __init__(): could not create default gnd_apilist.json in", "apilist definition. 
list mode = select specific aliases from base", "settings...\" ) if self.show_printmessages else None self.apilist: List[dict] = [", "[\"str\"]}], \"nominal\", ], }, \"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\": {},", "import Union, List from tei_entity_enricher.interface.postprocessing.io import FileReader, FileWriter from tei_entity_enricher.util.helper", "\"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\": {}, }, { \"name\": \"lobid\",", "{} try: filereader = FileReader( filepath=self.return_complete_url(), origin=\"web\", internal_call=True, show_printmessages=False )", "\"name\": [\"preferredName\", \"str\", \"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"], \"sameAs\": [\"sameAs\",", "sub method for filtering if data_selection is not None: if", "self.gnd_id is not None: if type(self.gnd_id) == str: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id)", "found categorial string to selfdefined string (i.e. 'Person' to 'person')", "from api as expected.\" ) if self.show_printmessages else None return", "\"CorporateBody\", \"place\": \"PlaceOrGeographicName\", }, ], \"name\": [\"preferredName\", \"str\", \"nominal\"], \"furtherNames\":", "baseUrl string of the currently selected api defined in self.apilist,", "import os from typing import Union, List from tei_entity_enricher.interface.postprocessing.io import", "_temp_data = filereader.loadfile_json() except: print( \"GndConnector connectivity error in get_gnd_data()", "different apis, defines api`s url and aliases for filtering purposes", "manually execute connectivitycheck_loop() method of the current connector object. 
continuing", "enumerate(self.gnd_id): _temp_data = {} try: filereader = FileReader( filepath=self.return_complete_url(index), origin=\"web\",", "[\"str\"], \"surname\": [\"str\"]}], \"nominal\", ], }, \"personAliases\": {}, \"placeAliases\": {},", "for entities of Deutsche Nationalbibliothek´s database is retrieved, loaded data", "str else 'baseAliases' # => allow parsing a list of", "self.show_printmessages else None return -1 def print_complete_url(self, index: int =", "selected by index value. returns 0 or -1 for unittest", "\"str\"}], \"nominal\", ], }, \"personAliases\": {}, \"placeAliases\": {}, \"organizationAliases\": {},", "List[str]]) -> dict: \"\"\"sub method, which extracts the key-value pairs", "keys, for which the mapping is provided in self.apilist) \"\"\"", "print( f\"GndConnector connectivity check: {self.apilist[self.remaining_apis_to_check[0]]['name']} api is currently not responding", "status: for gnd id {index + 1} ({gnd}) of {len(self.gnd_id)}", "self.show_printmessages else None self.connection_established = True # filtering: build new", "been established to any of the available apis. gnd data", "}, ], \"name\": [\"preferredName\", \"str\", \"nominal\"], \"furtherNames\": [\"variantName\", [\"str\"], \"nominal\"],", "i.e. 'str' or '[str]'), 3. the original value type (logic-wise:", "try: makedir_if_necessary(os.path.dirname(self.apilist_filepath)) FileWriter(data=self.apilist, filepath=self.apilist_filepath).writefile_json() except: print( f\"GndConnector __init__(): could not", "value, which has itself a value 'Person' of type str", "in baseAlias set? 
base_categories = list(self.apilist[self.apiindex][\"baseAliases\"].keys()) selected_categories = base_categories if", "_temp_data = [] print( f\"GndConnector get_gnd_data() filtering note: could not", "dicts as configuration data set, delivers a mapping to be", "}, { \"name\": \"lobid\", \"baseUrl\": \"http://lobid.org/gnd/{}\", \"baseAliases\": { \"type\": [", "None self.connection_established = True # filtering: build new dict with", "data from an api has already been received or not", "-1 def print_complete_url(self, index: int = 0) -> int: \"\"\"print", "number has been passed to connector object yet.\" ) if", "\"GndConnector connectivity error: after connectivity check no connection could has", "for gnd id {index + 1} ({gnd}) of {len(self.gnd_id)} no", "and filtered or unfiltered response json data as values data_selection:", "= True return 0 else: print( f\"GndConnector connectivity check: {self.apilist[self.remaining_apis_to_check[0]]['name']}", "== str: for _type in _temp_categorial_values: if _temp_data == _temp_categorial_values[_type]:", "= {} try: filereader = FileReader( filepath=self.return_complete_url(), origin=\"web\", internal_call=True, show_printmessages=False", "values data_selection: if delivered, a normalized output is generated by", "to 'person') elif type(_temp_category_data_form) == list: for _type in _temp_categorial_values:", "try: _temp_data = result[gnd_id][self.apilist[self.apiindex][\"baseAliases\"][category][0]] except KeyError: _temp_data = [] print(", "with the help of key-value mapping information stored in self.apilist)", "None self.show_printmessages: bool = show_printmessages self.gnd_id: Union[str, List[str], None] =", "as values data_selection: if delivered, a normalized output is generated", "'person', if the raw data json object has a key", "(purpose: json data delivered by different apis comes in different", "KeyError: _temp_data = [] print( f\"GndConnector get_gnd_data() filtering note: could", "\"GndConnector 
print_complete_url() error: apiindex is not defined correctly. using default", "definition sets in gnd_apilist.json by user # category_sets = {'base':", "raw data json object has a key '@type' with the", "string of the currently selected api defined in self.apilist, formatted", "or '[str]'), 3. the original value type (logic-wise: 'categorial' or", "[list(self.apilist[self.apiindex][\"baseAliases\"].keys()), 'baseAliases'], # 'custom': [list(self.apilist[self.apiindex][\"custom\"].keys()), 'custom'] # } # selected_categories_list", "list(self.apilist[self.apiindex][\"baseAliases\"].keys()) selected_categories = base_categories if mode == \"base\" else mode", "a user-defined set of keys, for which the mapping is", "self.gnd_id: Union[str, List[str], None] = gnd_id self.apiindex: int = apiindex", "data. continuing processing...\" ) if self.show_printmessages else None # handling", "one or more baseAliases (i.e. [\"type\", \"name\"]) (not yet implemented:", "checking every single api in self.apilist (ascending) and setting self.apiindex", "type(self.gnd_id) == list: for index, gnd in enumerate(self.gnd_id): _temp_data =", "keys and re-sorting data from different keys from the raw", "index of selected api in list defined in self.apilist check_connectivity:", "# replace found categorial list with selfdefined string (i.e. ['Person',", "{len(self.gnd_id)} processed\" ) if self.show_printmessages else None self.connection_established = True", "\"base\" else mode selected_categories_data = {} for category in selected_categories:", "'nominal'), 4. 
a categorization dict, if the original value type", "\"https://hub.culturegraph.org/entityfacts/{}\", \"baseAliases\": { \"type\": [ \"@type\", \"str\", \"categorial\", { \"person\":", "to save it gnd_id: gnd id number(s) apiindex: index of", "not be executed at the moment.\" ) if self.show_printmessages else", "status: data for gnd id {self.gnd_id} received.\" ) if self.show_printmessages", "0 and self.apilist[self.apiindex][\"baseAliases\"][category][2] == \"categorial\" and type(self.apilist[self.apiindex][\"baseAliases\"][category][3] == dict) ):", "\"type\", \"name\", \"furtherNames\", \"sameAs\", \"pseudonyms\") can be a list of", "if self.show_printmessages else None elif self.connection_established == False: print( \"GndConnector", "a dict is created, having gnd id numbers as keys", "\"\"\"return baseUrl string of the currently selected api defined in", "apiindex self.apilist_filepath: str = os.path.join(local_save_path, \"config\", \"postprocessing\", \"gnd_apilist.json\") try: self.apilist:", "self.show_printmessages: bool = show_printmessages self.gnd_id: Union[str, List[str], None] = gnd_id", "will be extracted) or a list of str (specific aliases", "\"culturegraph\", \"baseUrl\": \"https://hub.culturegraph.org/entityfacts/{}\", \"baseAliases\": { \"type\": [ \"@type\", \"str\", \"categorial\",", "the values of this mapping dict) found in raw data,", "could not find {category} information for {gnd_id} in raw data.", "is assigned to 'person', if the raw data json object", "checking for alternatives...\" ) if self.show_printmessages else None self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0]) self.connectivitycheck_loop()", "to api, from which norm data for entities of Deutsche", "connectivitycheck_single(self, index_to_test: int, gnd_id_to_test: str = \"118540238\") -> bool: \"\"\"auxiliary", "the key-value pairs from the raw data received from api", "gnd data queries can not be executed at the moment.\"", "internal printmessages on 
runtime or not apilist_filepath: path to apilist", "Deutsche Nationalbibliothek´s database is retrieved, loaded data can be passed", "file apilist: list of dicts as configuration data set, delivers", "return 0 else: print( f\"GndConnector connectivity check: {self.apilist[self.remaining_apis_to_check[0]]['name']} api is", "list of str (specific aliases will be extracted)\"\"\" # todo:", "= all base aliases from apilist definition. list mode =", "self.apiindex not in [i for i, _ in enumerate(self.apilist)]: print(", "None elif type(self.gnd_id) == list: print( f\"GndConnector complete URL of", "0, check_connectivity: bool = True, show_printmessages: bool = True, )", "enumerate(self.apilist)] self.connection_established = True return 0 else: print( f\"GndConnector connectivity", "the original value type logic-wise is 'categorial': it delivers mapping", "type(self.gnd_id) == list: print( f\"GndConnector complete URL of gnd id", "values of this mapping dict) found in raw data, example", "True: print( f\"GndConnector: connectivity check passed, connection to {self.apilist[self.remaining_apis_to_check[0]]['name']} api", "into new keys (purpose: json data delivered by different apis", "else: print( \"GndConnector: initialization has been done without connectivity check.\"", "can be \"base\" (all baseAliases data is provided: \"type\", \"name\",", "[] print( f\"GndConnector get_gnd_data() filtering note: could not find {category}", "is 'categorial': it delivers mapping information to assign a category", "if data_selection is not None: if type(self.gnd_id) == str: _new_dict", "str: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id) elif type(self.gnd_id) == list: return self.apilist[self.apiindex][\"baseUrl\"].format(self.gnd_id[index]) else:", "which have not been checked yet in connectivitycheck_loop()\"\"\" print(\"initializing GndConnector..\")", "the keys and/or values. 
alias definitions in self.apilist are used", "for i, _ in enumerate(self.apilist)] self.connection_established = True return 0", "connection to api, from which norm data for entities of", "self.show_printmessages else None elif type(self.gnd_id) == list: print( f\"GndConnector complete", "is retrieved, loaded data can be passed to an instance", "database is retrieved, loaded data can be passed to an", "print_complete_url(self, index: int = 0) -> int: \"\"\"print baseUrl string", "print( f\"GndConnector get_gnd_data() filtering note: could not find {category} information", "Nationalbibliothek´s database is retrieved, loaded data can be passed to", "\"GndConnector: initialization has been done without connectivity check.\" ) if", "a custom string refering to a user-defined set of keys,", "print( \"GndConnector: could not find gnd_apilist.json in config dir. creating", "of connectivitycheck_loop(), checks a single api`s (from self.apilist) response status", "not apilist_filepath: path to apilist config file apilist: list of", "a mapping to be able to normalize data from different", "internal_call=True, show_printmessages=False ) _temp_data = filereader.loadfile_json() except: print( \"GndConnector connectivity", "id list: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id[index])}\" ) if self.show_printmessages else None return 0", ") if self.show_printmessages else None self.remaining_apis_to_check.remove(self.remaining_apis_to_check[0]) self.connectivitycheck_loop() else: print( \"GndConnector", "delivers a mapping to be able to normalize data from", "provided in self.apilist) \"\"\" if self.check_connectivity == False: print( f\"GndConnector", "not remaining_apis_to_check: list of apiindex values, which have not been", "None elif type(self.gnd_id) == list: for index, gnd in enumerate(self.gnd_id):", "purposes\"\"\" if self.check_connectivity == False: self.check_connectivity == True if len(self.remaining_apis_to_check)", "base set) # defining sub method for 
filtering def filter_received_data(gnd_id:", ") if self.show_printmessages else None self.apiindex = self.remaining_apis_to_check[0] self.remaining_apis_to_check =", "object yet.\" ) if self.show_printmessages else None return None def", "if self.apiindex not in [i for i, _ in enumerate(self.apilist)]:", "no gnd id number has been passed to connector object", "f\"GndConnector get_gnd_data() status: for gnd id {index + 1} ({gnd})", "'@type' with the value 'person' of type str, example 2:", "filereader.loadfile_json() except: print( f\"GndConnector get_gnd_data() status: for gnd id {index", "= True, ) -> None: \"\"\"establishes connection to api, from", "data queries can not be executed at the moment.\" )", "self.show_printmessages else None else: print( f\"GndConnector get_gnd_data() status: for gnd", "self.apiindex = self.remaining_apis_to_check[0] self.remaining_apis_to_check = [i for i, _ in", "[i for i, _ in enumerate(self.apilist)]: print( \"GndConnector print_complete_url() error:", "connector object yet.\" ) if self.show_printmessages else None return -1", "[{\"id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"variantNameEntityForThePerson\", [{\"forename\": [\"str\"], \"surname\": [\"str\"]}],", "f\"GndConnector connectivity check: {self.apilist[self.remaining_apis_to_check[0]]['name']} api is currently not responding as", "api for one gnd id number and renames the keys", "category_sets = {'base': [list(self.apilist[self.apiindex][\"baseAliases\"].keys()), 'baseAliases'], # 'custom': [list(self.apilist[self.apiindex][\"custom\"].keys()), 'custom'] #", "unfiltered response json data as values data_selection: if delivered, a", "0 else: print( \"GndConnector print_complete_url() internal error: no gnd id", "show_printmessages=self.show_printmessages, ).loadfile_json() except: return False if type(result) == dict: return", "result = {} if type(self.gnd_id) == str: _temp_data = {}", "else None return None self.connection_established = True if _temp_data !=", 
"denotates (in order of the list) 1. the original key", "to 'person', if the raw data json object has a", "_type in _temp_categorial_values: if _temp_data == _temp_categorial_values[_type]: _temp_data = _type", "print( f\"GndConnector complete URL: {self.apilist[self.apiindex]['baseUrl'].format(self.gnd_id)}\" ) if self.show_printmessages else None", "'person' of type str, example 2: using lobid api the", "gnd id number and renames the keys and/or values. alias", ") if self.show_printmessages else None else: print( f\"GndConnector get_gnd_data() status:", "if self.show_printmessages else None return 0 else: print( \"GndConnector print_complete_url()", "to Goethe\"\"\" try: result: dict = FileReader( filepath=self.apilist[index_to_test][\"baseUrl\"].format(gnd_id_to_test), origin=\"web\", internal_call=True,", "if self.show_printmessages else None return None self.connection_established = True if", "gnd_id_to_test: str = \"118540238\") -> bool: \"\"\"auxiliary method of connectivitycheck_loop(),", "typing import Union, List from tei_entity_enricher.interface.postprocessing.io import FileReader, FileWriter from", "self.apilist[self.apiindex][\"baseAliases\"][category][2] == \"categorial\" and type(self.apilist[self.apiindex][\"baseAliases\"][category][3] == dict) ): _temp_category_data_form =", "\"sameAs\": [\"sameAs\", [{\"@id\": \"str\"}], \"nominal\"], \"pseudonyms\": [ \"pseudonym\", [{\"preferredName\": \"str\"}],", "mode: Union[str, List[str]]) -> dict: \"\"\"sub method, which extracts the", "= {'base': [list(self.apilist[self.apiindex][\"baseAliases\"].keys()), 'baseAliases'], # 'custom': [list(self.apilist[self.apiindex][\"custom\"].keys()), 'custom'] # }", "method for filtering if data_selection is not None: if type(self.gnd_id)", "of the list) 1. the original key name, 2. 
the", ") if self.show_printmessages else None result[gnd] = _temp_data print( f\"GndConnector", "None return None self.connection_established = True if _temp_data != None", "> 0: if self.connectivitycheck_single(self.remaining_apis_to_check[0]) == True: print( f\"GndConnector: connectivity check", "int = 0, check_connectivity: bool = True, show_printmessages: bool =" ]
[ "<filename>neslter/workflow/__init__.py import logging logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) logger.level = logging.DEBUG" ]
[ "parse_args, get_aligner, get_bbox def render(aligner, bbox, z): aligner.total_bbox = bbox", "args = parse_args(parser) a = get_aligner(args) bbox = get_bbox(args) for", "== '__main__': parser = get_argparser() args = parse_args(parser) a =", "bbox, z): aligner.total_bbox = bbox aligner.zs = z aligner.render_section_all_mips(z, bbox)", "if __name__ == '__main__': parser = get_argparser() args = parse_args(parser)", "= get_argparser() args = parse_args(parser) a = get_aligner(args) bbox =", "aligner.render_section_all_mips(z, bbox) if __name__ == '__main__': parser = get_argparser() args", "bbox aligner.zs = z aligner.render_section_all_mips(z, bbox) if __name__ == '__main__':", "'__main__': parser = get_argparser() args = parse_args(parser) a = get_aligner(args)", "= get_bbox(args) for z in range(args.bbox_start[2], args.bbox_stop[2]): print('Rendering z={0}'.format(z)) render(a,", "render(aligner, bbox, z): aligner.total_bbox = bbox aligner.zs = z aligner.render_section_all_mips(z,", "args import get_argparser, parse_args, get_aligner, get_bbox def render(aligner, bbox, z):", "bbox = get_bbox(args) for z in range(args.bbox_start[2], args.bbox_stop[2]): print('Rendering z={0}'.format(z))", "= get_aligner(args) bbox = get_bbox(args) for z in range(args.bbox_start[2], args.bbox_stop[2]):", "<filename>inference/_archive/render_section.py from args import get_argparser, parse_args, get_aligner, get_bbox def render(aligner,", "z aligner.render_section_all_mips(z, bbox) if __name__ == '__main__': parser = get_argparser()", "aligner.zs = z aligner.render_section_all_mips(z, bbox) if __name__ == '__main__': parser", "get_bbox def render(aligner, bbox, z): aligner.total_bbox = bbox aligner.zs =", "bbox) if __name__ == '__main__': parser = get_argparser() args =", "for z in range(args.bbox_start[2], args.bbox_stop[2]): print('Rendering z={0}'.format(z)) render(a, bbox, z)", "= z aligner.render_section_all_mips(z, bbox) if __name__ == '__main__': parser =", 
"parse_args(parser) a = get_aligner(args) bbox = get_bbox(args) for z in", "def render(aligner, bbox, z): aligner.total_bbox = bbox aligner.zs = z", "= bbox aligner.zs = z aligner.render_section_all_mips(z, bbox) if __name__ ==", "get_bbox(args) for z in range(args.bbox_start[2], args.bbox_stop[2]): print('Rendering z={0}'.format(z)) render(a, bbox,", "get_argparser, parse_args, get_aligner, get_bbox def render(aligner, bbox, z): aligner.total_bbox =", "a = get_aligner(args) bbox = get_bbox(args) for z in range(args.bbox_start[2],", "get_argparser() args = parse_args(parser) a = get_aligner(args) bbox = get_bbox(args)", "get_aligner(args) bbox = get_bbox(args) for z in range(args.bbox_start[2], args.bbox_stop[2]): print('Rendering", "import get_argparser, parse_args, get_aligner, get_bbox def render(aligner, bbox, z): aligner.total_bbox", "from args import get_argparser, parse_args, get_aligner, get_bbox def render(aligner, bbox,", "aligner.total_bbox = bbox aligner.zs = z aligner.render_section_all_mips(z, bbox) if __name__", "= parse_args(parser) a = get_aligner(args) bbox = get_bbox(args) for z", "__name__ == '__main__': parser = get_argparser() args = parse_args(parser) a", "get_aligner, get_bbox def render(aligner, bbox, z): aligner.total_bbox = bbox aligner.zs", "parser = get_argparser() args = parse_args(parser) a = get_aligner(args) bbox", "z): aligner.total_bbox = bbox aligner.zs = z aligner.render_section_all_mips(z, bbox) if" ]
[ "call options for each method. If not specified, the default", "Cancelling the operation renders the instance immediately unreadable via the", "progress of preparing the new instance. The instance name is", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets information about a particular instance", "any existing policy. Authorization requires ``spanner.instances.setIamPolicy`` on ``resource``. Example: >>>", "# transport methods, wrapped with `wrap_method` to add retry, #", "the parameters are invalid. \"\"\" # Wrap the transport method", "- The instance is readable via the API, with all", "2.0 (the \"License\"); # you may not use this file", "metadata that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse`", "self, name, field_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets information", "of this request: - The instance is readable via the", "actual callables which invoke the proper # transport methods, wrapped", "in the instance's allocation has been requested, billing is based", "the requested levels). - Databases can be created in the", "RPC # from the client configuration. # (Ordinarily, these are", "used to set user options on the client. API Endpoint", "configuration. # (Ordinarily, these are the defaults specified in the", "default_retry=self._method_configs[\"DeleteInstance\"].retry, default_timeout=self._method_configs[\"DeleteInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name) if metadata is", "can be used to track creation of the instance. The", "to add retry and timeout logic. if \"delete_instance\" not in", "# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "ValueError: If the parameters are invalid. \"\"\" # Wrap the", "instance's state becomes ``READY``. 
The returned ``long-running operation`` will have", "request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def get_instance_config( self, name, retry=google.api_core.gapic_v1.method.DEFAULT,", "isolation: problems with databases in one instance will not affect", "policy is being specified. See the operation documentation for the", "in self._inner_api_calls: self._inner_api_calls[ \"list_instance_configs\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instance_configs, default_retry=self._method_configs[\"ListInstanceConfigs\"].retry, default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout,", "not in self._inner_api_calls: self._inner_api_calls[ \"get_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs[\"GetInstance\"].retry,", "instance immediately unreadable via the API. - The instance can", "for which the policy is being specified. See the operation", "See the operation documentation for the appropriate value for this", "given project. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>>", "the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the", "import empty_pb2 from google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(\"google-cloud-spanner\").version class", "parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `instance_id`: >>>", "instance_id, instance) >>> >>> def callback(operation_future): ... # Handle result.", "Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. 
google.api_core.exceptions.RetryError:", "per-page, this determines the maximum number of resources in a", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists the supported instance configurations for", "self._inner_api_calls[ \"test_iam_permissions\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, default_retry=self._method_configs[\"TestIamPermissions\"].retry, default_timeout=self._method_configs[\"TestIamPermissions\"].timeout, client_info=self._client_info, )", "in one instance will not affect other instances. However, within", "a user-agent string along with API requests. If ``None``, then", "the client will attempt to ascertain the credentials from the", "also iterate over the pages of the response using its", "set through client_options. \"\"\" # Raise deprecation warnings for things", "not in self._inner_api_calls: self._inner_api_calls[ \"get_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs[\"GetIamPolicy\"].retry,", "both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials", "to go away. if client_config is not None: warnings.warn( \"The", "protocol. This argument may also be a callable which returns", "pass >>> >>> >>> # Alternatively: >>> >>> # Iterate", "specified, the default configuration is used. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client", "return iterator def get_instance_config( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "Client options used to set user options on the client.", "TODO: Initialize `resource`: >>> resource = '' >>> >>> #", "transport (Union[~.InstanceAdminGrpcTransport, Callable[[~.Credentials, type], ~.InstanceAdminGrpcTransport]): A transport instance, responsible for", "Iterate over all results >>> for element in client.list_instances(parent): ...", "``GetIamPolicy``. This field is only used by Cloud IAM. If", "instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to create. The name", "google.api_core.operation import google.api_core.operations_v1 import google.api_core.page_iterator import google.api_core.path_template import grpc from", "storage resources to be used by Cloud Spanner databases. Each", "failed for any reason. google.api_core.exceptions.RetryError: If the request failed due", "... 
pass >>> >>> >>> # Alternatively: >>> >>> #", "if type(client_options) == dict: client_options = google.api_core.client_options.from_dict( client_options ) if", "client_info = google.api_core.gapic_v1.client_info.ClientInfo( gapic_version=_GAPIC_LIBRARY_VERSION ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info", "the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask (Union[dict,", "be of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions`", "operation = self._inner_api_calls[\"create_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic(", "of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless", "info used to send a user-agent string along with API", "are returned. If a dict is provided, it must be", "default_timeout=self._method_configs[\"GetInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceRequest( name=name, field_mask=field_mask ) if", "databases* immediately and irrevocably disappear from the API. All data", "argument is mutually exclusive with ``credentials``; providing both will raise", "and timeout logic. if \"get_instance_config\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance_config\"", "client_info=None, client_options=None, ): \"\"\"Constructor. Args: transport (Union[~.InstanceAdminGrpcTransport, Callable[[~.Credentials, type], ~.InstanceAdminGrpcTransport]):", "this client using the provided credentials file. Args: filename (str):", "retried using a default configuration. 
timeout (Optional[float]): The amount of", "License for the specific language governing permissions and # limitations", "only need to set this if you're developing your own", ">>> field_mask = {} >>> >>> response = client.update_instance(instance, field_mask)", "# deserialization and actually sending data to the service. if", "return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, ) def list_instance_configs( self,", "retry and timeout logic. if \"list_instances\" not in self._inner_api_calls: self._inner_api_calls[", "of permissions. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>>", "# TODO: Initialize `permissions`: >>> permissions = [] >>> >>>", "policy, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Sets the access control", "\"env\" and the value of the label contains the string", "filters are: - ``name:*`` --> The instance has a name.", "\"\"\" SERVICE_ADDRESS = \"spanner.googleapis.com:443\" \"\"\"The default address of the service.\"\"\"", "the operation renders the instance immediately unreadable via the API.", "of requests and consumes most of the instance resources, fewer", "account private key json file. args: Additional arguments to pass", "per- resource, this parameter does not affect the return value.", "out the default settings for retry and timeout for each", "self._inner_api_calls[\"set_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def get_iam_policy( self, resource,", "be used to create, delete, modify and list instances. Instances", "TODO: Initialize `field_mask`: >>> field_mask = {} >>> >>> response", "being specified. See the operation documentation for the appropriate value", "of permissions to check for the ``resource``. Permissions with wildcards", "for things we want to go away. 
if client_config is", "import google.api_core.path_template import grpc from google.cloud.spanner_admin_instance_v1.gapic import enums from google.cloud.spanner_admin_instance_v1.gapic", "transport. # The transport is responsible for handling serialization and", "use of the instance (though there may be additional network", "Parse out the default settings for retry and timeout for", "not in self._inner_api_calls: self._inner_api_calls[ \"get_instance_config\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance_config, default_retry=self._method_configs[\"GetInstanceConfig\"].retry,", "service_account.Credentials.from_service_account_file(filename) kwargs[\"credentials\"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file", "US-central, Europe). Configurations are created by Google based on resource", "All newly-reserved resources are available for serving the instance's tables.", "determines the maximum number of resources in a page. filter_", "The instance has the label \"env\". - ``labels.env:dev`` --> The", "operation: - Cancelling the operation renders the instance immediately unreadable", "= self._inner_api_calls[\"create_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation,", "google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(\"google-cloud-spanner\").version class InstanceAdminClient(object): \"\"\" Cloud", ">>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata =", "particular instance. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>>", "routing_header = [(\"name\", name)] except AttributeError: pass else: routing_metadata =", "parent = client.project_path('[PROJECT]') >>> >>> # Iterate over all results", ">>> for element in client.list_instances(parent): ... 
# process element ...", "non-existent Cloud Spanner instance resource will result in a NOT_FOUND", "- ``name`` - ``display_name`` - ``labels.key`` where key is the", "For example, if one database in an instance receives a", "of the request: - Billing ceases for all of the", "- Cancelling the operation sets its metadata's ``cancel_time``, and begins", "on the containing Google Cloud Project. Otherwise returns an empty", "(str): REQUIRED: The resource for which the policy is being", "each other. For example, if one database in an instance", "instance name is assigned by the caller. If the named", "= google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, default_retry=self._method_configs[\"TestIamPermissions\"].retry, default_timeout=self._method_configs[\"TestIamPermissions\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.TestIamPermissionsRequest(", "retry and timeout logic. if \"list_instance_configs\" not in self._inner_api_calls: self._inner_api_calls[", "not in self._inner_api_calls: self._inner_api_calls[ \"list_instances\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instances, default_retry=self._method_configs[\"ListInstances\"].retry,", "like. self._inner_api_calls = {} # Service calls def create_instance( self,", "modify the instance are rejected. - Reading the instance via", "list_instances( self, parent, page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "to be deleted. 
Values are of the form ``projects/<project>/instances/<instance>`` retry", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls[\"delete_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata )", "import google.api_core.gapic_v1.method import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.operation import google.api_core.operations_v1", "in a page. filter_ (str): An expression for filtering the", "The instance to create. The name may be omitted, but", "must be of the same form as the protobuf message", "the appropriate value for this field. policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED:", "of the policy is limited to a few 10s of", "If page streaming is performed per- resource, this parameter does", "(google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials", "to a few 10s of KB. An empty policy is", "service. if transport: if callable(transport): self.transport = transport( credentials=credentials, default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport,", "A :class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances. You can", "via the API. - The instance's state becomes ``READY``. The", "else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"update_instance\"](", "filename, *args, **kwargs): \"\"\"Creates an instance of this client using", "and the like. self._inner_api_calls = {} # Service calls def", "a policy set. Authorization requires ``spanner.instances.getIamPolicy`` on ``resource``. 
Example: >>>", "the project for which a list of supported instance configurations", "Values are of the form ``projects/<project>/instances/<instance>``. field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If", "instance = {} >>> >>> response = client.create_instance(parent, instance_id, instance)", "google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs[\"GetInstance\"].retry, default_timeout=self._method_configs[\"GetInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceRequest( name=name,", "these are the defaults specified in the `*_config.py` # file", "a label Some examples of using filters are: - ``name:*``", "invalid. \"\"\" # Wrap the transport method to add retry", ">>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize", "(str): REQUIRED: The resource for which the policy detail is", "in client.list_instances(parent).pages: ... for element in page: ... # process", "can be deleted. - All other attempts to modify the", "on the newly-requested level. Until completion of the returned operation:", "that instance, and their performance may suffer. \"\"\" SERVICE_ADDRESS =", "OF ANY KIND, either express or implied. # See the", "upon completion of this request: - The instance is readable", "instances in the given project. Example: >>> from google.cloud import", "label contains the string \"dev\". 
- ``name:howl labels.env:dev`` --> The", "in self._inner_api_calls: self._inner_api_calls[ \"get_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs[\"GetInstance\"].retry, default_timeout=self._method_configs[\"GetInstance\"].timeout,", "See the License for the specific language governing permissions and", "google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import", "self._inner_api_calls[\"list_instance_configs\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instance_configs\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", )", "These are the actual callables which invoke the proper #", "to in writing, software # distributed under the License is", "for actually making the API calls. The default transport uses", ">>> >>> # TODO: Initialize `instance`: >>> instance = {}", "... for element in page: ... # process element ...", "which to create the instance. Values are of the form", "Instance Admin API The Cloud Spanner Instance Admin API can", ") if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( gapic_version=_GAPIC_LIBRARY_VERSION )", "but no allocated resources. Its state is ``CREATING``. Until completion", "is being requested. See the operation documentation for the appropriate", "or agreed to in writing, software # distributed under the", "the instance. 
If the named instance does not exist, returns", "self._inner_api_calls: self._inner_api_calls[ \"list_instances\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instances, default_retry=self._method_configs[\"ListInstances\"].retry, default_timeout=self._method_configs[\"ListInstances\"].timeout, client_info=self._client_info,", "and must be between 2 and 64 characters in length.", "by Cloud IAM. If a dict is provided, it must", "self._inner_api_calls[ \"get_instance_config\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance_config, default_retry=self._method_configs[\"GetInstanceConfig\"].retry, default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout, client_info=self._client_info, )", "resource ``name``. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>>", "The instance has the label \"env\" and the value of", "argument is deprecated.\", PendingDeprecationWarning, stacklevel=2, ) else: client_config = instance_admin_client_config.config", "\"\"\" Cloud Spanner Instance Admin API The Cloud Spanner Instance", "calls def create_instance( self, parent, instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "from_service_account_file @classmethod def instance_path(cls, project, instance): \"\"\"Return a fully-qualified instance", "# TODO: Initialize `resource`: >>> resource = '' >>> >>>", "``spanner.instances.setIamPolicy`` on ``resource``. Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "your own client library. client_options (Union[dict, google.api_core.client_options.ClientOptions]): Client options used", "obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0", "default transport uses the gRPC protocol. 
This argument may also", "there are no additional per-database or per-operation charges for use", "specifies the subset of ``Instance`` fields that should be returned.", "compliance with the License. # You may obtain a copy", "permissions) Args: resource (str): REQUIRED: The resource for which the", "This is the key used to # find the method", "DEPRECATED. A dictionary of call options for each method. If", "databases in that instance, and their performance may suffer. \"\"\"", "operation: - Billing begins for all successfully-allocated resources (some types", "After an instance exists, there are no additional per-database or", "except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata)", "proper # transport methods, wrapped with `wrap_method` to add retry,", "instance's reserved resources. Soon afterward: - The instance and *all", "\"env\" with its value containing \"dev\". retry (Optional[google.api_core.retry.Retry]): A retry", "as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object", "Gets the access control policy for an instance resource. Returns", "``labels.env:*`` --> The instance has the label \"env\". - ``labels.env:dev``", "Each instance has a \"configuration\", which dictates where the serving", ") # Save a dictionary of cached API call functions.", "\"Received both a transport instance and \" \"credentials; these are", "through which to make calls. This argument is mutually exclusive", "allocated resources. Its state is ``CREATING``. Until completion of the", "returns a transport instance. Callables will be sent the credentials", "The set of permissions to check for the ``resource``. Permissions", "begins preparing it to begin serving. 
The returned ``long-running operation``", "operation: - Billing for all successfully-allocated resources begins (some types", "metadata.append(routing_metadata) operation = self._inner_api_calls[\"update_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) return", "name = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> response = client.get_instance(name) Args:", "for which a list of instances is requested. Values are", ">>> metadata = response.metadata() Args: instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The", "settings for retry and timeout for each RPC # from", "def get_instance( self, name, field_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "spanner_instance_admin_pb2.GetInstanceRequest( name=name, field_mask=field_mask ) if metadata is None: metadata =", "not use this file except in compliance with the License.", "use \" \"`transport` instead.\", PendingDeprecationWarning, stacklevel=2, ) api_endpoint = self.SERVICE_ADDRESS", "resources as requested. The returned ``long-running operation`` can be used", "may be additional network bandwidth charges). Instances offer isolation: problems", "# Instantiate the transport. 
# The transport is responsible for", ") def get_iam_policy( self, resource, options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "retry=retry, timeout=timeout, metadata=metadata ) def get_iam_policy( self, resource, options_=None, retry=google.api_core.gapic_v1.method.DEFAULT,", "specified in the `*_config.py` # file next to this one.)", "request, retry=retry, timeout=timeout, metadata=metadata ) def set_iam_policy( self, resource, policy,", "``long-running operation`` will have a name of the format ``<instance_name>/operations/<operation_id>``", "you may not use this file except in compliance with", "Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError:", "immediately unreadable via the API. - The instance can be", "object used to retry requests. If ``None`` is specified, requests", "= {} >>> >>> response = client.update_instance(instance, field_mask) >>> >>>", "always include the instance name. Otherwise, only fields mentioned in", "client_info=self._client_info, ) request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name) if metadata is None: metadata", "field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If field_mask is present, specifies the subset", "for the appropriate value for this field. permissions (list[str]): The", "metadata that is provided to the method. 
Raises: google.api_core.exceptions.GoogleAPICallError: If", "can be used to track the progress of updating the", "not in self._inner_api_calls: self._inner_api_calls[ \"delete_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs[\"DeleteInstance\"].retry,", "class InstanceAdminClient(object): \"\"\" Cloud Spanner Instance Admin API The Cloud", "project=project ) def __init__( self, transport=None, channel=None, credentials=None, client_config=None, client_info=None,", "be sent the credentials as the first argument and the", "API. - The instance's state becomes ``READY``. The returned ``long-running", ") request = spanner_instance_admin_pb2.GetInstanceRequest( name=name, field_mask=field_mask ) if metadata is", "of the form ``projects/<project>/instances/<instance>`` retry (Optional[google.api_core.retry.Retry]): A retry object used", "try: routing_header = [(\"name\", name)] except AttributeError: pass else: routing_metadata", "is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send", "instance is readable via the API, with all requested attributes", "for this field. options_ (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object", "raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach", "of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry", "any future fields in ``Instance`` from being erased accidentally by", "of preparing the new instance. The instance name is assigned", "= google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"set_iam_policy\"]( request, retry=retry, timeout=timeout,", "used to create, delete, modify and list instances. 
Instances are", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs[\"UpdateInstance\"].retry, default_timeout=self._method_configs[\"UpdateInstance\"].timeout, client_info=self._client_info, ) request =", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "default_retry=self._method_configs[\"ListInstances\"].retry, default_timeout=self._method_configs[\"ListInstances\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstancesRequest( parent=parent, page_size=page_size, filter=filter_", "check for the ``resource``. Permissions with wildcards (such as '*'", "instance) >>> >>> def callback(operation_future): ... # Handle result. ...", "= [] metadata = list(metadata) try: routing_header = [(\"name\", name)]", "deprecated; use \" \"`transport` instead.\", PendingDeprecationWarning, stacklevel=2, ) api_endpoint =", "return self._inner_api_calls[\"get_instance_config\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def list_instances( self,", "add retry, # timeout, and the like. self._inner_api_calls = {}", "operation`` will have a name of the format ``<instance_name>/operations/<operation_id>`` and", "credentials file. Args: filename (str): The path to the service", "policy is limited to a few 10s of KB. An", "add retry and timeout logic. if \"set_iam_policy\" not in self._inner_api_calls:", "request = spanner_instance_admin_pb2.GetInstanceRequest( name=name, field_mask=field_mask ) if metadata is None:", "to above. - ``labels.env:*`` --> The instance has the label", "begins restoring resources to their pre-request values. The operation is", "\"test_iam_permissions\" not in self._inner_api_calls: self._inner_api_calls[ \"test_iam_permissions\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions,", "client_config (dict): DEPRECATED. A dictionary of call options for each", "updated. 
The field mask must always be specified; this prevents", "the constructor. Returns: InstanceAdminClient: The constructed client. \"\"\" credentials =", "serialization and # deserialization and actually sending data to the", "successfully-allocated resources begins (some types may have lower than the", "documentation for the appropriate value for this field. permissions (list[str]):", "completion of the request: - Billing ceases for all of", "the project for which a list of instances is requested.", "def list_instance_configs( self, parent, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"update_instance\"]( request,", "determines the maximum number of resources in a page. retry", "and it has the label \"env\" with its value containing", "API call functions. # These are the actual callables which", "type is ``Instance``, if successful. Authorization requires ``spanner.instances.update`` permission on", "iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instance_configs\"], retry=retry, timeout=timeout, metadata=metadata, ),", "is assigned by the caller. If the named instance already", "self._inner_api_calls: self._inner_api_calls[ \"get_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs[\"GetIamPolicy\"].retry, default_timeout=self._method_configs[\"GetIamPolicy\"].timeout, client_info=self._client_info,", "a lot of requests and consumes most of the instance", "specifying which fields in ``Instance`` should be updated. The field", "might reject them. If a dict is provided, it must", "form ``projects/<project>``. instance_id (str): Required. 
The ID of the instance", "import spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> #", "- The instance's state becomes ``READY``. The returned ``long-running operation``", "this parameter does not affect the return value. If page", "are the defaults specified in the `*_config.py` # file next", "request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance,", "client. This is the key used to # find the", "parent (str): Required. The name of the project for which", "the instances that exist and their sizes. After an instance", "test_iam_permissions( self, resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Returns", "completion of the returned operation: - Cancelling the operation sets", "\" \"credentials; these are mutually exclusive.\" ) self.transport = transport", "An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances. You can also iterate over", ">>> parent = client.project_path('[PROJECT]') >>> >>> # Iterate over all", "--> The instance's name contains \"howl\" and it has the", "Admin API can be used to create, delete, modify and", "begin serving. The returned ``long-running operation`` can be used to", "If ``None`` is specified, requests will be retried using a", "{} >>> >>> # TODO: Initialize `field_mask`: >>> field_mask =", "the maximum number of resources in a page. filter_ (str):", "attributes but no allocated resources. Its state is ``CREATING``. 
Until", "fully-qualified instance string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instances/{instance}\", project=project, instance=instance, ) @classmethod", "not None: warnings.warn( \"The `client_config` argument is deprecated.\", PendingDeprecationWarning, stacklevel=2,", "the provided credentials file. Args: filename (str): The path to", "-*- # # Copyright 2020 Google LLC # # Licensed", "the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the", "retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Returns permissions that the caller", "are of the form ``projects/<project>``. instance_id (str): Required. The ID", "contains the string \"howl\". - ``name:HOWL`` --> Equivalent to above.", "retry and timeout logic. if \"get_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[", "for all of the instance's reserved resources. Soon afterward: -", "\"\"\"Constructor. Args: transport (Union[~.InstanceAdminGrpcTransport, Callable[[~.Credentials, type], ~.InstanceAdminGrpcTransport]): A transport instance,", "= transport( credentials=credentials, default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport, address=api_endpoint, ) else: if credentials: raise", "routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance\"]( request, retry=retry,", "``projects/<project>/instances/<instance>``. field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If field_mask is present, specifies the", "add retry and timeout logic. if \"get_iam_policy\" not in self._inner_api_calls:", "you're developing your own client library. 
client_options (Union[dict, google.api_core.client_options.ClientOptions]): Client", "field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(\"google-cloud-spanner\").version class InstanceAdminClient(object): \"\"\" Cloud Spanner Instance", "its value containing \"dev\". retry (Optional[google.api_core.retry.Retry]): A retry object used", "\"create_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance, default_retry=self._method_configs[\"CreateInstance\"].retry, default_timeout=self._method_configs[\"CreateInstance\"].timeout, client_info=self._client_info, ) request", "self.transport = instance_admin_grpc_transport.InstanceAdminGrpcTransport( address=api_endpoint, channel=channel, credentials=credentials ) if client_info is", "``display_name`` - ``labels.key`` where key is the name of a", "caller has on the specified instance resource. Attempting this RPC", "routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"create_instance\"]( request,", "google.api_core.gapic_v1.client_info.ClientInfo( gapic_version=_GAPIC_LIBRARY_VERSION ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Updates an instance, and begins allocating", "begins allocating or releasing resources as requested. The returned ``long-running", "request: - Billing ceases for all of the instance's reserved", "# These are the actual callables which invoke the proper", "its databases* immediately and irrevocably disappear from the API. All", "may also be a callable which returns a transport instance.", "specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str,", "client.update_instance(instance, field_mask) >>> >>> def callback(operation_future): ... 
# Handle result.", "project for which a list of instances is requested. Values", "to add retry and timeout logic. if \"create_instance\" not in", "project in which to create the instance. Values are of", "delete_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Deletes an", "policy. Authorization requires ``spanner.instances.setIamPolicy`` on ``resource``. Example: >>> from google.cloud", "at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "client.get_instance_config(name) Args: name (str): Required. The name of the requested", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "add retry and timeout logic. if \"delete_instance\" not in self._inner_api_calls:", "the form ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64", "... # Handle result. ... result = operation_future.result() >>> >>>", "provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance. Raises: google.api_core.exceptions.GoogleAPICallError:", "instance = {} >>> >>> # TODO: Initialize `field_mask`: >>>", "google.api_core.client_options.from_dict( client_options ) if client_options.api_endpoint: api_endpoint = client_options.api_endpoint # Instantiate", "... pass Args: parent (str): Required. The name of the", "~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to ``GetIamPolicy``.", "instances that exist and their sizes. After an instance exists,", "on the specified instance resource. Attempting this RPC on a", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Creates an instance and begins preparing", "as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A", "to this one.) 
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config[\"interfaces\"][self._INTERFACE_NAME] ) # Save", "Required. The name of the project in which to create", "request=request, items_field=\"instance_configs\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def get_instance_config( self,", ">>> # TODO: Initialize `resource`: >>> resource = '' >>>", "element in page: ... # process element ... pass Args:", "policy if an instance exists but does not have a", "InstanceAdmin API.\"\"\" import functools import pkg_resources import warnings from google.oauth2", "AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls[\"delete_instance\"](", "in the given project. Example: >>> from google.cloud import spanner_admin_instance_v1", "name contains the string \"howl\". - ``name:HOWL`` --> Equivalent to", "transport method to add retry and timeout logic. if \"test_iam_permissions\"", "that if ``retry`` is specified, the timeout applies to each", "@classmethod def project_path(cls, project): \"\"\"Return a fully-qualified project string.\"\"\" return", "else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator(", "https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "metadata=metadata ) def get_iam_policy( self, resource, options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "transport instance to ``transport``; doing so will raise an exception.", "sets its metadata's ``cancel_time``, and begins restoring resources to their", "The Cloud Spanner Instance Admin API can be used to", "instance configuration. 
Values are of the form ``projects/<project>/instanceConfigs/<config>``. retry (Optional[google.api_core.retry.Retry]):", "the given project. Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "and the value of the label contains the string \"dev\".", "add retry and timeout logic. if \"get_instance\" not in self._inner_api_calls:", "timeout logic. if \"update_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"update_instance\" ]", "file except in compliance with the License. # You may", "the access control policy for an instance resource. Returns an", "and \" \"credentials; these are mutually exclusive.\" ) self.transport =", "deleted. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client", "API can be used to create, delete, modify and list", "the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the", "with ``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials):", "operation renders the instance immediately unreadable via the API. -", "resource levels. Upon completion of the returned operation: - Billing", "resource = '' >>> >>> response = client.get_iam_policy(resource) Args: resource", "the API, with all requested attributes but no allocated resources.", "``<parent>/instances/<instance_id>``. If a dict is provided, it must be of", "# Wrap the transport method to add retry and timeout", "The default transport uses the gRPC protocol. This argument may", "channel (grpc.Channel): DEPRECATED. A ``Channel`` instance through which to make", "always be specified; this prevents any future fields in ``Instance``", "logic. 
if \"get_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance\" ] =", "self._inner_api_calls[\"delete_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def set_iam_policy( self, resource,", "to add retry and timeout logic. if \"update_instance\" not in", "from google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config from google.cloud.spanner_admin_instance_v1.gapic.transports import ( instance_admin_grpc_transport, )", "to the service account private key json file. args: Additional", "self._inner_api_calls[ \"get_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs[\"GetInstance\"].retry, default_timeout=self._method_configs[\"GetInstance\"].timeout, client_info=self._client_info, )", "): \"\"\" Creates an instance and begins preparing it to", "the instance (though there may be additional network bandwidth charges).", "API Endpoint should be set through client_options. \"\"\" # Raise", "as the first argument and the default transport class as", "The operation is guaranteed to succeed at undoing all resource", "else: self.transport = instance_admin_grpc_transport.InstanceAdminGrpcTransport( address=api_endpoint, channel=channel, credentials=credentials ) if client_info", ">>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]')", "the databases is permanently deleted. Example: >>> from google.cloud import", "= [(\"resource\", resource)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(", "None: metadata = [] metadata = list(metadata) try: routing_header =", "- ``name:Howl`` --> The instance's name contains the string \"howl\".", "to the constructor. Returns: InstanceAdminClient: The constructed client. 
\"\"\" credentials", "= spanner_instance_admin_pb2.DeleteInstanceRequest(name=name) if metadata is None: metadata = [] metadata", "that should be returned. If absent, all ``Instance`` fields are", ">>> from google.cloud import spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient()", "governing permissions and # limitations under the License. \"\"\"Accesses the", "`client_config` argument is deprecated.\", PendingDeprecationWarning, stacklevel=2, ) else: client_config =", "for page in client.list_instance_configs(parent).pages: ... for element in page: ...", "mutually exclusive with providing a transport instance to ``transport``; doing", "self._inner_api_calls: self._inner_api_calls[ \"delete_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs[\"DeleteInstance\"].retry, default_timeout=self._method_configs[\"DeleteInstance\"].timeout, client_info=self._client_info,", "Some examples of using filters are: - ``name:*`` --> The", "dictates where the serving resources for the Cloud Spanner instance", "for specifying options to ``GetIamPolicy``. This field is only used", "method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason.", "of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances. You can also iterate over the pages", "project, instance_config): \"\"\"Return a fully-qualified instance_config string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instanceConfigs/{instance_config}\",", "instance databases can affect each other. 
For example, if one", "operation = self._inner_api_calls[\"update_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic(", "google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata, ) def update_instance( self, instance,", "application to the service. If none are specified, the client", "requested instance configuration. Values are of the form ``projects/<project>/instanceConfigs/<config>``. retry", "the proper # transport methods, wrapped with `wrap_method` to add", "def instance_path(cls, project, instance): \"\"\"Return a fully-qualified instance string.\"\"\" return", "You can also iterate over the pages of the response", "in self._inner_api_calls: self._inner_api_calls[ \"get_instance_config\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance_config, default_retry=self._method_configs[\"GetInstanceConfig\"].retry, default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout,", "Additional arguments to pass to the constructor. kwargs: Additional arguments", "channel=channel, credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo(", "``name:howl labels.env:dev`` --> The instance's name contains \"howl\" and it", "in a NOT_FOUND error if the user has ``spanner.instances.list`` permission", "successful. Authorization requires ``spanner.instances.update`` permission on resource ``name``. Example: >>>", "policy on an instance resource. Replaces any existing policy. Authorization", "to check for the ``resource``. Permissions with wildcards (such as", "KIND, either express or implied. 
# See the License for", "client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( gapic_version=_GAPIC_LIBRARY_VERSION ) else: client_info.gapic_version", "is specified, requests will be retried using a default configuration.", "due to a retryable error and retry attempts failed. ValueError:", "`wrap_method` to add retry, # timeout, and the like. self._inner_api_calls", "the instance's reserved resources. Soon afterward: - The instance and", "constructor. kwargs: Additional arguments to pass to the constructor. Returns:", "timeout logic. if \"create_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"create_instance\" ]", "the instance name. Otherwise, only fields mentioned in ``field_mask`` need", "timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata, )", "Required. The name of the project for which a list", "that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance.", "this client. This is the key used to # find", "method to add retry and timeout logic. if \"update_instance\" not", "permissions. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client", "self.transport.create_instance, default_retry=self._method_configs[\"CreateInstance\"].retry, default_timeout=self._method_configs[\"CreateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.CreateInstanceRequest( parent=parent, instance_id=instance_id,", ">>> >>> response = client.get_instance(name) Args: name (str): Required. The", "- The instance's allocated resource levels are readable via the", "# find the method configuration in the client_config dictionary. 
_INTERFACE_NAME", "service_account import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import", "Databases can be created in the instance. - The instance's", "this prevents any future fields in ``Instance`` from being erased", "lower than the requested levels). - Databases can be created", "exclusive with providing a transport instance to ``transport``; doing so", "(the \"License\"); # you may not use this file except", "create_instance( self, parent, instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "Lists all instances in the given project. Example: >>> from", "options=options_ ) if metadata is None: metadata = [] metadata", "self, instance, field_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Updates an", "None: warnings.warn( \"The `client_config` argument is deprecated.\", PendingDeprecationWarning, stacklevel=2, )", "is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance. Raises:", "not have a policy set. Authorization requires ``spanner.instances.getIamPolicy`` on ``resource``.", "an exception. client_config (dict): DEPRECATED. A dictionary of call options", "complete. Note that if ``retry`` is specified, the timeout applies", "Upon completion of the returned operation: - Billing for all", "each individual attempt. 
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is", "is deprecated; use \" \"`transport` instead.\", PendingDeprecationWarning, stacklevel=2, ) api_endpoint", "self.transport.update_instance, default_retry=self._method_configs[\"UpdateInstance\"].retry, default_timeout=self._method_configs[\"UpdateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance, field_mask=field_mask", "for which the policy is being requested. See the operation", "# Save a dictionary of cached API call functions. #", "client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_path('[PROJECT]', '[INSTANCE]') >>>", "and begins restoring resources to their pre-request values. The operation", "arguments to pass to the constructor. kwargs: Additional arguments to", "responsible for handling serialization and # deserialization and actually sending", "which the policy is being specified. See the operation documentation", "else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls[\"delete_instance\"]( request, retry=retry,", "response = client.create_instance(parent, instance_id, instance) >>> >>> def callback(operation_future): ...", "else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out", "# # Unless required by applicable law or agreed to", "not affect other instances. However, within an instance databases can", "TODO: Initialize `permissions`: >>> permissions = [] >>> >>> response", "google.api_core.grpc_helpers import google.api_core.operation import google.api_core.operations_v1 import google.api_core.page_iterator import google.api_core.path_template import", "# process element ... 
pass >>> >>> >>> # Alternatively:", "Cancelling the operation sets its metadata's ``cancel_time``, and begins restoring", "if the user has ``spanner.instances.list`` permission on the containing Google", ">>> >>> parent = client.project_path('[PROJECT]') >>> >>> # Iterate over", "\"set_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs[\"SetIamPolicy\"].retry, default_timeout=self._method_configs[\"SetIamPolicy\"].timeout, client_info=self._client_info, ) request", "Values are of the form ``projects/<project>/instanceConfigs/<config>``. retry (Optional[google.api_core.retry.Retry]): A retry", "has on the specified instance resource. Attempting this RPC on", "mutually exclusive.\" ) self.transport = transport else: self.transport = instance_admin_grpc_transport.InstanceAdminGrpcTransport(", "its metadata's ``cancel_time``, and begins restoring resources to their pre-request", "instance exists, there are no additional per-database or per-operation charges", "path to the service account private key json file. args:", "the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used", "\"\"\" Sets the access control policy on an instance resource.", "not in self._inner_api_calls: self._inner_api_calls[ \"list_instance_configs\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instance_configs, default_retry=self._method_configs[\"ListInstanceConfigs\"].retry,", "AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return", "results one page at a time >>> for page in", "implied. # See the License for the specific language governing", "in length. instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to create.", "10s of KB. 
An empty policy is a valid policy", "page streaming is performed per-page, this determines the maximum number", "to the ``resource``. The size of the policy is limited", "credentials: raise ValueError( \"Received both a transport instance and \"", "may suffer. \"\"\" SERVICE_ADDRESS = \"spanner.googleapis.com:443\" \"\"\"The default address of", "to create the instance. Values are of the form ``projects/<project>``.", "them. If a dict is provided, it must be of", "client_info=self._client_info, ) request = iam_policy_pb2.GetIamPolicyRequest( resource=resource, options=options_ ) if metadata", "\" \"`transport` instead.\", PendingDeprecationWarning, stacklevel=2, ) api_endpoint = self.SERVICE_ADDRESS if", "# Iterate over results one page at a time >>>", "and consumes most of the instance resources, fewer resources are", "ceases for all of the instance's reserved resources. Soon afterward:", "\"\"\" Returns permissions that the caller has on the specified", "transport uses the gRPC protocol. This argument may also be", "are not allowed. For more information see `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.", "for the appropriate value for this field. 
policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]):", "metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, ) def", "\"\"\" # Raise deprecation warnings for things we want to", ">>> >>> # TODO: Initialize `instance_id`: >>> instance_id = ''", "and can be used to track creation of the instance.", "instance_admin_grpc_transport.InstanceAdminGrpcTransport( address=api_endpoint, channel=channel, credentials=credentials ) if client_info is None: client_info", "import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers", "import pkg_resources import warnings from google.oauth2 import service_account import google.api_core.client_options", "upon completion of the request: - Billing ceases for all", ">>> response = client.get_instance_config(name) Args: name (str): Required. The name", "= {} >>> >>> response = client.create_instance(parent, instance_id, instance) >>>", "individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided", "Spanner instance are located (e.g., US-central, Europe). Configurations are created", "page_size (int): The maximum number of resources contained in the", "and begins preparing it to begin serving. The returned ``long-running", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"create_instance\"]( request, retry=retry, timeout=timeout,", "metadata=None, ): \"\"\" Creates an instance and begins preparing it", "parent=parent, page_size=page_size ) if metadata is None: metadata = []", "Cloud Spanner databases. 
Each instance has a \"configuration\", which dictates", "actually making the API calls. The default transport uses the", "for use of the instance (though there may be additional", ") request = spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance, field_mask=field_mask ) if metadata is", "value of the label contains the string \"dev\". - ``name:howl", "def delete_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Deletes", "- Billing for all successfully-allocated resources begins (some types may", ">>> response = client.set_iam_policy(resource, policy) Args: resource (str): REQUIRED: The", "= [] metadata = list(metadata) try: routing_header = [(\"resource\", resource)]", ">>> client.delete_instance(name) Args: name (str): Required. The name of the", "Billing for all successfully-allocated resources begins (some types may have", "`resource`: >>> resource = '' >>> >>> response = client.get_iam_policy(resource)", "be additional network bandwidth charges). Instances offer isolation: problems with", "resources are available for serving the instance's tables. 
- The", "Unless required by applicable law or agreed to in writing,", "iam_policy_pb2 from google.iam.v1 import options_pb2 from google.iam.v1 import policy_pb2 from", "the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "Spanner instance resource will result in a NOT_FOUND error if", "self._inner_api_calls: self._inner_api_calls[ \"list_instance_configs\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instance_configs, default_retry=self._method_configs[\"ListInstanceConfigs\"].retry, default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout, client_info=self._client_info,", "retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instances\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"test_iam_permissions\"]( request, retry=retry, timeout=timeout, metadata=metadata", "routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"set_iam_policy\"]( request, retry=retry,", "types for which a decrease in the instance's allocation has", "`channel` argument is deprecated; use \" \"`transport` instead.\", PendingDeprecationWarning, stacklevel=2,", "the specific language governing permissions and # limitations under the", "\"list_instance_configs\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instance_configs\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instance_configs,", "): \"\"\" Sets the access control policy on an instance", "warnings for things we want to go away. if client_config", "through client_options. 
\"\"\" # Raise deprecation warnings for things we", "in self._inner_api_calls: self._inner_api_calls[ \"delete_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs[\"DeleteInstance\"].retry, default_timeout=self._method_configs[\"DeleteInstance\"].timeout,", ">>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> parent = client.project_path('[PROJECT]') >>>", "SERVICE_ADDRESS = \"spanner.googleapis.com:443\" \"\"\"The default address of the service.\"\"\" #", "REQUIRED: The resource for which the policy is being specified.", "# The transport is responsible for handling serialization and #", "is ``CREATING``. Until completion of the returned operation: - Cancelling", "Immediately upon completion of the request: - Billing ceases for", "- Databases can be created in the instance. - The", "readable via the API. - The instance's state becomes ``READY``.", "format ``<instance_name>/operations/<operation_id>`` and can be used to track creation of", "dict: client_options = google.api_core.client_options.from_dict( client_options ) if client_options.api_endpoint: api_endpoint =", ">>> >>> response = client.set_iam_policy(resource, policy) Args: resource (str): REQUIRED:", "to update, which must always include the instance name. Otherwise,", "update, which must always include the instance name. Otherwise, only", ">>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize", "google.api_core.gapic_v1.method.wrap_method( self.transport.list_instance_configs, default_retry=self._method_configs[\"ListInstanceConfigs\"].retry, default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstanceConfigsRequest( parent=parent,", "list of supported instance configurations is requested. 
Values are of", "of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask", "resource for which the policy detail is being requested. See", "this field. policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The complete policy to", "at a time >>> for page in client.list_instances(parent).pages: ... for", "returned ``long-running operation`` can be used to track the progress", "get_iam_policy( self, resource, options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets", "being requested. See the operation documentation for the appropriate value", "with providing a transport instance to ``transport``; doing so will", "developing your own client library. client_options (Union[dict, google.api_core.client_options.ClientOptions]): Client options", "the returned operation: - Billing begins for all successfully-allocated resources", "this RPC on a non-existent Cloud Spanner instance resource will", "- The instance can be deleted. - All other attempts", "self._inner_api_calls[ \"set_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs[\"SetIamPolicy\"].retry, default_timeout=self._method_configs[\"SetIamPolicy\"].timeout, client_info=self._client_info, )", "from_service_account_json = from_service_account_file @classmethod def instance_path(cls, project, instance): \"\"\"Return a", "that the caller has on the specified instance resource. Attempting", "transport method to add retry and timeout logic. if \"get_instance\"", "of the instance (though there may be additional network bandwidth", "the request failed for any reason. google.api_core.exceptions.RetryError: If the request", ">>> resource = '' >>> >>> # TODO: Initialize `permissions`:", "which fields in ``Instance`` should be updated. The field mask", "the like. 
self._inner_api_calls = {} # Service calls def create_instance(", "self._inner_api_calls: self._inner_api_calls[ \"get_instance_config\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance_config, default_retry=self._method_configs[\"GetInstanceConfig\"].retry, default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout, client_info=self._client_info,", "the instance. - The instance's allocated resource levels are readable", "metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata, ) def", ") def __init__( self, transport=None, channel=None, credentials=None, client_config=None, client_info=None, client_options=None,", "instance receives a lot of requests and consumes most of", "appropriate value for this field. options_ (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL: A", "response = client.set_iam_policy(resource, policy) Args: resource (str): REQUIRED: The resource", "string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instanceConfigs/{instance_config}\", project=project, instance_config=instance_config, ) @classmethod def project_path(cls,", "same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A", "self._inner_api_calls: self._inner_api_calls[ \"update_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs[\"UpdateInstance\"].retry, default_timeout=self._method_configs[\"UpdateInstance\"].timeout, client_info=self._client_info,", "specified, requests will be retried using a default configuration. 
timeout", "\"\"\"Return a fully-qualified instance_config string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instanceConfigs/{instance_config}\", project=project, instance_config=instance_config,", "a time >>> for page in client.list_instances(parent).pages: ... for element", "--> The instance has the label \"env\" and the value", "request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name) if metadata is None: metadata = []", "--> Equivalent to above. - ``NAME:howl`` --> Equivalent to above.", "google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(\"google-cloud-spanner\").version", "client_config=None, client_info=None, client_options=None, ): \"\"\"Constructor. Args: transport (Union[~.InstanceAdminGrpcTransport, Callable[[~.Credentials, type],", "request failed due to a retryable error and retry attempts", "= list(metadata) try: routing_header = [(\"parent\", parent)] except AttributeError: pass", "to the method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of", "a decrease in the instance's allocation has been requested, billing", "should be set through client_options. \"\"\" # Raise deprecation warnings", "client_options.api_endpoint # Instantiate the transport. # The transport is responsible", "instance does not exist, returns ``NOT_FOUND``. Immediately upon completion of", "configuration. Values are of the form ``projects/<project>/instanceConfigs/<config>``. retry (Optional[google.api_core.retry.Retry]): A", "field type is ``CreateInstanceMetadata``. 
The ``response`` field type is ``Instance``,", "= spanner_instance_admin_pb2.CreateInstanceRequest( parent=parent, instance_id=instance_id, instance=instance ) if metadata is None:", "= spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize `instance`: >>> instance", "or per-operation charges for use of the instance (though there", "applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata", "name is assigned by the caller. If the named instance", "items_field=\"instance_configs\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def get_instance_config( self, name,", "transport method to add retry and timeout logic. if \"delete_instance\"", "supported instance configurations is requested. Values are of the form", "affect other instances. However, within an instance databases can affect", "Authorization requires ``spanner.instances.update`` permission on resource ``name``. Example: >>> from", "default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name) if metadata is None:", "name of the requested instance configuration. Values are of the", "documentation for the appropriate value for this field. policy (Union[dict,", "state is ``CREATING``. Until completion of the returned operation: -", "the credentials from the environment. This argument is mutually exclusive", "list instances. Instances are dedicated Cloud Spanner serving and storage", "transport method to add retry and timeout logic. 
if \"create_instance\"", ") def delete_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "in self._inner_api_calls: self._inner_api_calls[ \"test_iam_permissions\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, default_retry=self._method_configs[\"TestIamPermissions\"].retry, default_timeout=self._method_configs[\"TestIamPermissions\"].timeout,", ") @classmethod def project_path(cls, project): \"\"\"Return a fully-qualified project string.\"\"\"", "The amount of time, in seconds, to wait for the", "the value of the label contains the string \"dev\". -", "away. if client_config is not None: warnings.warn( \"The `client_config` argument", "coding: utf-8 -*- # # Copyright 2020 Google LLC #", "Cloud Spanner Instance Admin API The Cloud Spanner Instance Admin", "self, resource, options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets the", "resource for which the policy is being requested. See the", "client_options ) if client_options.api_endpoint: api_endpoint = client_options.api_endpoint # Instantiate the", "License. \"\"\"Accesses the google.spanner.admin.instance.v1 InstanceAdmin API.\"\"\" import functools import pkg_resources", "arguments to pass to the constructor. Returns: InstanceAdminClient: The constructed", "based on resource availability. Cloud Spanner billing is based on", "resources. Soon afterward: - The instance and *all of its", "This argument may also be a callable which returns a", ">>> >>> # TODO: Initialize `resource`: >>> resource = ''", "rejected. - Reading the instance via the API continues to", "rules are case insensitive. The fields eligible for filtering are:", "the policy detail is being requested. See the operation documentation", "dictionary of call options for each method. If not specified,", "instance. 
If the named instance does not exist, returns ``NOT_FOUND``.", ">>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize `instance`:", "instance's allocation has been requested, billing is based on the", "= google.api_core.gapic_v1.method.wrap_method( self.transport.list_instance_configs, default_retry=self._method_configs[\"ListInstanceConfigs\"].retry, default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstanceConfigsRequest(", "retry and timeout logic. if \"delete_instance\" not in self._inner_api_calls: self._inner_api_calls[", "pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance_config\"](", "InstanceAdminClient: The constructed client. \"\"\" credentials = service_account.Credentials.from_service_account_file(filename) kwargs[\"credentials\"] =", ">>> # TODO: Initialize `permissions`: >>> permissions = [] >>>", "each RPC # from the client configuration. # (Ordinarily, these", "= credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @classmethod def", "'' >>> >>> response = client.get_iam_policy(resource) Args: resource (str): REQUIRED:", "the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]):", "<reponame>qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 # -*- coding: utf-8 -*- # # Copyright 2020", "if specified must be ``<parent>/instances/<instance_id>``. If a dict is provided,", "than the requested levels). - Databases can be created in", "The returned ``long-running operation`` will have a name of the", "instance name. Otherwise, only fields mentioned in ``field_mask`` need be", "\"howl\". - ``name:HOWL`` --> Equivalent to above. 
- ``NAME:howl`` -->", "be used to track creation of the instance. The ``metadata``", "field_mask = {} >>> >>> response = client.update_instance(instance, field_mask) >>>", ") metadata.append(routing_metadata) self._inner_api_calls[\"delete_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def set_iam_policy(", "Required. The name of the requested instance configuration. Values are", "are of the form ``projects/<project>/instances/<instance>`` retry (Optional[google.api_core.retry.Retry]): A retry object", "``name:*`` --> The instance has a name. - ``name:Howl`` -->", "iam_policy_pb2.GetIamPolicyRequest( resource=resource, options=options_ ) if metadata is None: metadata =", "metadata=None, ): \"\"\" Returns permissions that the caller has on", "new instance. The instance name is assigned by the caller.", "`instance`: >>> instance = {} >>> >>> # TODO: Initialize", "page: ... # process element ... pass Args: parent (str):", "spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name) if metadata is None: metadata = [] metadata =", "name of the project in which to create the instance.", "for each RPC # from the client configuration. # (Ordinarily,", "request, retry=retry, timeout=timeout, metadata=metadata ) def test_iam_permissions( self, resource, permissions,", "the credentials as the first argument and the default transport", "resource for which the policy is being specified. See the", "is performed per-page, this determines the maximum number of resources", "than the requested levels). - All newly-reserved resources are available", "service.\"\"\" # The name of the interface for this client.", "method to add retry and timeout logic. if \"test_iam_permissions\" not", "-*- coding: utf-8 -*- # # Copyright 2020 Google LLC", "Instantiate the transport. 
# The transport is responsible for handling", "transport else: self.transport = instance_admin_grpc_transport.InstanceAdminGrpcTransport( address=api_endpoint, channel=channel, credentials=credentials ) if", "be used. Generally, you only need to set this if", "Creates an instance and begins preparing it to begin serving.", "be used to track the progress of updating the instance.", "~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to update, which must always include", "key used to # find the method configuration in the", "google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance, default_retry=self._method_configs[\"CreateInstance\"].retry, default_timeout=self._method_configs[\"CreateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.CreateInstanceRequest( parent=parent,", "\"update_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"update_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance,", ") metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def", "You may obtain a copy of the License at #", "metadata that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig`", "this application to the service. If none are specified, the", "the progress of preparing the new instance. The instance name", "will result in a NOT_FOUND error if the user has", "by Google based on resource availability. Cloud Spanner billing is", "google.cloud import spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>>", "An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances. You can also iterate over", "resource. 
Returns an empty policy if an instance exists but", "= google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"create_instance\"]( request, retry=retry,", "``transport``; doing so will raise an exception. client_config (dict): DEPRECATED.", "Project. Otherwise returns an empty set of permissions. Example: >>>", "\"\"\" Lists all instances in the given project. Example: >>>", "appropriate value for this field. permissions (list[str]): The set of", "callables which invoke the proper # transport methods, wrapped with", "name. Otherwise, only fields mentioned in ``field_mask`` need be included.", "as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object", "name of the interface for this client. This is the", "file. args: Additional arguments to pass to the constructor. kwargs:", "to begin serving. The returned ``long-running operation`` can be used", ">>> permissions = [] >>> >>> response = client.test_iam_permissions(resource, permissions)", "be of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask`", "\"get_instance_config\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance_config, default_retry=self._method_configs[\"GetInstanceConfig\"].retry, default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout, client_info=self._client_info, ) request", "channel=None, credentials=None, client_config=None, client_info=None, client_options=None, ): \"\"\"Constructor. Args: transport (Union[~.InstanceAdminGrpcTransport,", "retry and timeout logic. if \"test_iam_permissions\" not in self._inner_api_calls: self._inner_api_calls[", "have lower than the requested levels). - Databases can be", "not know about them. 
If a dict is provided, it", "instance_config=instance_config, ) @classmethod def project_path(cls, project): \"\"\"Return a fully-qualified project", "the google.spanner.admin.instance.v1 InstanceAdmin API.\"\"\" import functools import pkg_resources import warnings", "Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>>", "configuration. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client", "The complete policy to be applied to the ``resource``. The", "google.api_core.path_template.expand( \"projects/{project}/instances/{instance}\", project=project, instance=instance, ) @classmethod def instance_config_path(cls, project, instance_config):", "the second argument. channel (grpc.Channel): DEPRECATED. A ``Channel`` instance through", "request = spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance, field_mask=field_mask ) if metadata is None:", "for element in client.list_instance_configs(parent): ... # process element ... pass", "message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry", "(int): The maximum number of resources contained in the underlying", "then default info will be used. Generally, you only need", "routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata )", "element in client.list_instances(parent): ... # process element ... pass >>>", "but does not have a policy set. Authorization requires ``spanner.instances.getIamPolicy``", "pre-request resource levels. Upon completion of the returned operation: -", "transport method to add retry and timeout logic. 
if \"get_instance_config\"", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs[\"GetInstance\"].retry, default_timeout=self._method_configs[\"GetInstance\"].timeout, client_info=self._client_info, ) request =", "logic. if \"get_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"get_iam_policy\" ] =", "resource. Replaces any existing policy. Authorization requires ``spanner.instances.setIamPolicy`` on ``resource``.", "instance and begins preparing it to begin serving. The returned", "= {} >>> >>> response = client.set_iam_policy(resource, policy) Args: resource", "credentials = service_account.Credentials.from_service_account_file(filename) kwargs[\"credentials\"] = credentials return cls(*args, **kwargs) from_service_account_json", "import warnings from google.oauth2 import service_account import google.api_core.client_options import google.api_core.gapic_v1.client_info", "only fields mentioned in ``field_mask`` need be included. If a", "to succeed at undoing all resource changes, after which point", "options_ (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options", "find the method configuration in the client_config dictionary. _INTERFACE_NAME =", "of the project for which a list of instances is", "\"projects/{project}/instances/{instance}\", project=project, instance=instance, ) @classmethod def instance_config_path(cls, project, instance_config): \"\"\"Return", "form ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 characters", "complete policy to be applied to the ``resource``. The size", "specified, the client will attempt to ascertain the credentials from", "instance modification. The ``metadata`` field type is ``UpdateInstanceMetadata``. The ``response``", "or releasing resources as requested. 
The returned ``long-running operation`` can", "list(metadata) try: routing_header = [(\"instance.name\", instance.name)] except AttributeError: pass else:", "``Instance``, if successful. Authorization requires ``spanner.instances.update`` permission on resource ``name``.", "transport=None, channel=None, credentials=None, client_config=None, client_info=None, client_options=None, ): \"\"\"Constructor. Args: transport", "transport instance and \" \"credentials; these are mutually exclusive.\" )", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets the access control policy for", "clients that do not know about them. If a dict", "metadata.append(routing_metadata) return self._inner_api_calls[\"get_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def test_iam_permissions(", "with `wrap_method` to add retry, # timeout, and the like.", "that exist and their sizes. After an instance exists, there", "# Handle metadata. >>> metadata = response.metadata() Args: instance (Union[dict,", "resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used", "a page. filter_ (str): An expression for filtering the results", "Initialize `instance`: >>> instance = {} >>> >>> # TODO:", "policy_pb2 from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 from", "The instance's state becomes ``READY``. The returned ``long-running operation`` will", "@classmethod def from_service_account_file(cls, filename, *args, **kwargs): \"\"\"Creates an instance of", "not allowed. For more information see `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. 
retry", "client_options: if type(client_options) == dict: client_options = google.api_core.client_options.from_dict( client_options )", "in self._inner_api_calls: self._inner_api_calls[ \"set_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs[\"SetIamPolicy\"].retry, default_timeout=self._method_configs[\"SetIamPolicy\"].timeout,", "used. Generally, you only need to set this if you're", "``NOT_FOUND``. Immediately upon completion of this request: - For resource", "the string \"dev\". - ``name:howl labels.env:dev`` --> The instance's name", "logic. if \"get_instance_config\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance_config\" ] =", "google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If", "caller. If the named instance already exists, ``CreateInstance`` returns ``ALREADY_EXISTS``.", "of the returned operation: - Billing for all successfully-allocated resources", "to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If", "License. # You may obtain a copy of the License", "retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists the supported instance configurations", "The maximum number of resources contained in the underlying API", "instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Creates an instance and", "not in self._inner_api_calls: self._inner_api_calls[ \"update_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs[\"UpdateInstance\"].retry,", "instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances. 
You can also iterate", "included. If a dict is provided, it must be of", "\"\"\" Gets information about a particular instance. Example: >>> from", "`pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any", "parameters are invalid. \"\"\" # Wrap the transport method to", "Cloud Spanner Instance Admin API can be used to create,", "instance. The ``metadata`` field type is ``CreateInstanceMetadata``. The ``response`` field", "on the instances that exist and their sizes. After an", "has been requested, billing is based on the newly-requested level.", "instance resource will result in a NOT_FOUND error if the", "client_info=self._client_info, ) request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) if metadata is None:", "Required. The name of the instance to be deleted. Values", "import service_account import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method", "all instances in the given project. Example: >>> from google.cloud", "which returns a transport instance. Callables will be sent the", "used to # find the method configuration in the client_config", "list(metadata) try: routing_header = [(\"name\", name)] except AttributeError: pass else:", "specified instance resource. Attempting this RPC on a non-existent Cloud", "'[INSTANCE]') >>> >>> response = client.get_instance(name) Args: name (str): Required.", "operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata, ) def update_instance( self, instance, field_mask,", "the request: - Billing ceases for all of the instance's", "environment. 
This argument is mutually exclusive with providing a transport", "= google.api_core.gapic_v1.config.parse_method_configs( client_config[\"interfaces\"][self._INTERFACE_NAME] ) # Save a dictionary of cached", "of the form ``projects/<project>``. instance_id (str): Required. The ID of", "Instance Admin API can be used to create, delete, modify", "a transport instance to ``transport``; doing so will raise an", "used to track the instance modification. The ``metadata`` field type", "provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError:", "A dictionary of call options for each method. If not", "the API calls. The default transport uses the gRPC protocol.", "``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials): The", "terminates with a ``CANCELLED`` status. - All other attempts to", "all results >>> for element in client.list_instances(parent): ... # process", "retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Sets the access control policy", "list of instances is requested. Values are of the form", "mask specifying which fields in ``Instance`` should be updated. The", "is responsible for handling serialization and # deserialization and actually", "transport class as the second argument. channel (grpc.Channel): DEPRECATED. A", "instance=instance, field_mask=field_mask ) if metadata is None: metadata = []", "instance. - The instance's allocated resource levels are readable via", "not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The", "point it terminates with a ``CANCELLED`` status. 
- All other", "to # find the method configuration in the client_config dictionary.", "resource = '' >>> >>> # TODO: Initialize `permissions`: >>>", "a retryable error and retry attempts failed. ValueError: If the", "resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Returns permissions that", "routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata )", "that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance.", "operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>>", "request to complete. Note that if ``retry`` is specified, the", "metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance_config\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def list_instances(", "metadata=None, ): \"\"\" Deletes an instance. Immediately upon completion of", "# # Copyright 2020 Google LLC # # Licensed under", "default_retry=self._method_configs[\"GetIamPolicy\"].retry, default_timeout=self._method_configs[\"GetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.GetIamPolicyRequest( resource=resource, options=options_ )", "levels are readable via the API. - The instance's state", "timeout=timeout, metadata=metadata, ), request=request, items_field=\"instance_configs\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator", "timeout logic. if \"delete_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"delete_instance\" ]", "Instances are dedicated Cloud Spanner serving and storage resources to", "# The name of the interface for this client. 
This", "client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> client.delete_instance(name) Args: name (str): Required. The", "are: - ``name`` - ``display_name`` - ``labels.key`` where key is", "return value. If page streaming is performed per-page, this determines", "requests. If ``None``, then default info will be used. Generally,", "address=api_endpoint, channel=channel, credentials=credentials ) if client_info is None: client_info =", "to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If", "containing \"dev\". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry", "Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed", "only used by Cloud IAM. If a dict is provided,", "else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"test_iam_permissions\"]( request,", "Wrap the transport method to add retry and timeout logic.", "self, parent, instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Creates", "(e.g., US-central, Europe). Configurations are created by Google based on", "which a list of supported instance configurations is requested. Values", "from google.oauth2 import service_account import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config", "instance will not affect other instances. However, within an instance", "after which point it terminates with a ``CANCELLED`` status. -", "``spanner.instances.getIamPolicy`` on ``resource``. 
Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>>", "must be between 2 and 64 characters in length. instance", "the label contains the string \"dev\". - ``name:howl labels.env:dev`` -->", "= '' >>> >>> # TODO: Initialize `permissions`: >>> permissions", "Admin API The Cloud Spanner Instance Admin API can be", "method to add retry and timeout logic. if \"set_iam_policy\" not", "pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"test_iam_permissions\"](", "for the specific language governing permissions and # limitations under", "Initialize `field_mask`: >>> field_mask = {} >>> >>> response =", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance_config\"]( request, retry=retry, timeout=timeout, metadata=metadata", ">>> >>> # TODO: Initialize `permissions`: >>> permissions = []", "- Billing begins for all successfully-allocated resources (some types may", "[] metadata = list(metadata) try: routing_header = [(\"name\", name)] except", "timeout=timeout, metadata=metadata ) def delete_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any", "for all successfully-allocated resources begins (some types may have lower", "is permanently deleted. 
Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstanceConfigsRequest( parent=parent, page_size=page_size ) if", "return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata, ) def update_instance( self,", "request. Filter rules are case insensitive. The fields eligible for", ") @classmethod def instance_config_path(cls, project, instance_config): \"\"\"Return a fully-qualified instance_config", "at a time >>> for page in client.list_instance_configs(parent).pages: ... for", "return google.api_core.path_template.expand( \"projects/{project}/instanceConfigs/{instance_config}\", project=project, instance_config=instance_config, ) @classmethod def project_path(cls, project):", "TODO: Initialize `instance_id`: >>> instance_id = '' >>> >>> #", "does not have a policy set. Authorization requires ``spanner.instances.getIamPolicy`` on", "required by applicable law or agreed to in writing, software", "metadata = list(metadata) try: routing_header = [(\"resource\", resource)] except AttributeError:", "to add retry and timeout logic. if \"test_iam_permissions\" not in", "): \"\"\" Lists the supported instance configurations for a given", "policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The complete policy to be applied", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "are dedicated Cloud Spanner serving and storage resources to be", "must be ``<parent>/instances/<instance_id>``. If a dict is provided, it must", "are of the form ``projects/<project>/instances/<instance>``. 
field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If field_mask", "Spanner Instance Admin API can be used to create, delete,", "instance's tables. - The instance's new resource levels are readable", "in self._inner_api_calls: self._inner_api_calls[ \"list_instances\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instances, default_retry=self._method_configs[\"ListInstances\"].retry, default_timeout=self._method_configs[\"ListInstances\"].timeout,", "databases. Each instance has a \"configuration\", which dictates where the", "spanner_admin_instance_v1.InstanceAdminClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO:", ") if metadata is None: metadata = [] metadata =", "PendingDeprecationWarning, stacklevel=2, ) api_endpoint = self.SERVICE_ADDRESS if client_options: if type(client_options)", "located (e.g., US-central, Europe). Configurations are created by Google based", "pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation =", "# (Ordinarily, these are the defaults specified in the `*_config.py`", "client_info=self._client_info, ) request = spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance, field_mask=field_mask ) if metadata", "other attempts to modify the instance are rejected. 
Upon completion", "key is the name of a label Some examples of", "[(\"name\", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header", "can be used to create, delete, modify and list instances.", "default_retry=self._method_configs[\"UpdateInstance\"].retry, default_timeout=self._method_configs[\"UpdateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance, field_mask=field_mask )", "- The instance's new resource levels are readable via the", "instance_config string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instanceConfigs/{instance_config}\", project=project, instance_config=instance_config, ) @classmethod def", "if successful. Authorization requires ``spanner.instances.update`` permission on resource ``name``. Example:", "- ``labels.key`` where key is the name of a label", "agreed to in writing, software # distributed under the License", "to requests. These credentials identify this application to the service.", "``projects/<project>/instanceConfigs/<config>``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests.", ") request = spanner_instance_admin_pb2.ListInstancesRequest( parent=parent, page_size=page_size, filter=filter_ ) if metadata", "import options_pb2 from google.iam.v1 import policy_pb2 from google.longrunning import operations_pb2", "an empty set of permissions. Example: >>> from google.cloud import", "``None``, then default info will be used. Generally, you only", "if you're developing your own client library. 
client_options (Union[dict, google.api_core.client_options.ClientOptions]):", "distributed under the License is distributed on an \"AS IS\"", "an empty policy if an instance exists but does not", "else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance_config\"]( request,", "retry=retry, timeout=timeout, metadata=metadata ) def test_iam_permissions( self, resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT,", "(some types may have lower than the requested levels). -", "a ``CANCELLED`` status. - All other attempts to modify the", "\"\"\"Accesses the google.spanner.admin.instance.v1 InstanceAdmin API.\"\"\" import functools import pkg_resources import", "``NAME:howl`` --> Equivalent to above. - ``labels.env:*`` --> The instance", "operation documentation for the appropriate value for this field. policy", "Cloud Project. Otherwise returns an empty set of permissions. 
Example:", "Initialize `policy`: >>> policy = {} >>> >>> response =", "exists, there are no additional per-database or per-operation charges for", "pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance\"](", "response = client.get_iam_policy(resource) Args: resource (str): REQUIRED: The resource for", "charges for use of the instance (though there may be", "InstanceAdminClient(object): \"\"\" Cloud Spanner Instance Admin API The Cloud Spanner", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance_config, default_retry=self._method_configs[\"GetInstanceConfig\"].retry, default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout, client_info=self._client_info, ) request =", "the actual callables which invoke the proper # transport methods,", "if \"delete_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"delete_instance\" ] = google.api_core.gapic_v1.method.wrap_method(", "parent, page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists all", "client. \"\"\" credentials = service_account.Credentials.from_service_account_file(filename) kwargs[\"credentials\"] = credentials return cls(*args,", "form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry", "levels). - All newly-reserved resources are available for serving the", "time, in seconds, to wait for the request to complete.", "field mask must always be specified; this prevents any future", "An empty policy is a valid policy but certain Cloud", "the service account private key json file. args: Additional arguments", "of the form ``projects/<project>/instanceConfigs/<config>``. 
retry (Optional[google.api_core.retry.Retry]): A retry object used", "the appropriate value for this field. permissions (list[str]): The set", "the policy is limited to a few 10s of KB.", "create. The name may be omitted, but if specified must", "routing_header = [(\"parent\", parent)] except AttributeError: pass else: routing_metadata =", "Initialize `resource`: >>> resource = '' >>> >>> # TODO:", "operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, ) def list_instance_configs( self, parent, page_size=None,", "timeout=timeout, metadata=metadata ) def list_instances( self, parent, page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT,", "that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the", "import policy_pb2 from google.longrunning import operations_pb2 from google.protobuf import empty_pb2", "request, retry=retry, timeout=timeout, metadata=metadata ) def get_iam_policy( self, resource, options_=None,", "resource availability. Cloud Spanner billing is based on the instances", "``Channel`` instance through which to make calls. This argument is", "are rejected. Upon completion of the returned operation: - Billing", "instance_id=instance_id, instance=instance ) if metadata is None: metadata = []", "- ``labels.env:*`` --> The instance has the label \"env\". -", "deleted. - All other attempts to modify the instance are", "import spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> parent", "2 and 64 characters in length. 
instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required.", "google.spanner.admin.instance.v1 InstanceAdmin API.\"\"\" import functools import pkg_resources import warnings from", "metadata=metadata ) def delete_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "response = client.update_instance(instance, field_mask) >>> >>> def callback(operation_future): ... #", "provided, it must be of the same form as the", "if \"set_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"set_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method(", "be created in the instance. - The instance's allocated resource", "all requested attributes but no allocated resources. Its state is", "the returned operation: - Cancelling the operation sets its metadata's", "Initialize `resource`: >>> resource = '' >>> >>> response =", "response using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request", "default_retry=self._method_configs[\"TestIamPermissions\"].retry, default_timeout=self._method_configs[\"TestIamPermissions\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions )", "the requested instance configuration. Values are of the form ``projects/<project>/instanceConfigs/<config>``.", "this request: - The instance is readable via the API,", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"set_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata", "this if you're developing your own client library. client_options (Union[dict,", "response.add_done_callback(callback) >>> >>> # Handle metadata. 
>>> metadata = response.metadata()", "\"get_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs[\"GetIamPolicy\"].retry, default_timeout=self._method_configs[\"GetIamPolicy\"].timeout, client_info=self._client_info, ) request", "Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required.", "resources to their pre-request values. The operation is guaranteed to", "control policy on an instance resource. Replaces any existing policy.", "operation documentation for the appropriate value for this field. permissions", "google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs[\"GetIamPolicy\"].retry, default_timeout=self._method_configs[\"GetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.GetIamPolicyRequest( resource=resource,", "Values are of the form ``projects/<project>``. page_size (int): The maximum", "has the label \"env\" with its value containing \"dev\". retry", "set of permissions. Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "A :class:`~google.api_core.operation.Operation` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for", "method to add retry and timeout logic. if \"list_instance_configs\" not", "for any reason. google.api_core.exceptions.RetryError: If the request failed due to", "metadata. >>> metadata = response.metadata() Args: instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required.", ") metadata.append(routing_metadata) return self._inner_api_calls[\"get_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def", "parent=parent, instance_id=instance_id, instance=instance ) if metadata is None: metadata =", "provided credentials file. 
Args: filename (str): The path to the", "the format ``<instance_name>/operations/<operation_id>`` and can be used to track the", "erased accidentally by clients that do not know about them.", "Returns permissions that the caller has on the specified instance", "import spanner_instance_admin_pb2 from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc from google.iam.v1 import iam_policy_pb2", "the instance resources, fewer resources are available for other databases", "if \"list_instances\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instances\" ] = google.api_core.gapic_v1.method.wrap_method(", "iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances. You can also iterate over the", "are created by Google based on resource availability. Cloud Spanner", "``Instance`` fields are returned. If a dict is provided, it", "OR CONDITIONS OF ANY KIND, either express or implied. #", "These credentials identify this application to the service. If none", "``name`` - ``display_name`` - ``labels.key`` where key is the name", "which the policy detail is being requested. See the operation", "to track the instance modification. The ``metadata`` field type is", "operation documentation for the appropriate value for this field. options_", "the License is distributed on an \"AS IS\" BASIS, #", "dedicated Cloud Spanner serving and storage resources to be used", "add retry and timeout logic. if \"update_instance\" not in self._inner_api_calls:", "``UpdateInstanceMetadata``. The ``response`` field type is ``Instance``, if successful. Authorization", "their performance may suffer. \"\"\" SERVICE_ADDRESS = \"spanner.googleapis.com:443\" \"\"\"The default", "fields in ``Instance`` from being erased accidentally by clients that", "and timeout logic. if \"set_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"set_iam_policy\"", "created by Google based on resource availability. 
Cloud Spanner billing", "the string \"howl\". - ``name:HOWL`` --> Equivalent to above. -", "pre-request values. The operation is guaranteed to succeed at undoing", "specified; this prevents any future fields in ``Instance`` from being", "bandwidth charges). Instances offer isolation: problems with databases in one", "transport method to add retry and timeout logic. if \"update_instance\"", "from google.iam.v1 import options_pb2 from google.iam.v1 import policy_pb2 from google.longrunning", "timeout for each RPC # from the client configuration. #", "from being erased accidentally by clients that do not know", "project, instance): \"\"\"Return a fully-qualified instance string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instances/{instance}\",", "from google.iam.v1 import policy_pb2 from google.longrunning import operations_pb2 from google.protobuf", "instances. However, within an instance databases can affect each other.", "(Ordinarily, these are the defaults specified in the `*_config.py` #", "self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, ) def list_instance_configs( self, parent, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT,", "string \"dev\". - ``name:howl labels.env:dev`` --> The instance's name contains", "``Instance`` should be updated. The field mask must always be", "if transport: if callable(transport): self.transport = transport( credentials=credentials, default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport, address=api_endpoint,", "as requested. The returned ``long-running operation`` can be used to", "retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Creates an instance and begins", "law or agreed to in writing, software # distributed under", "policy=policy) if metadata is None: metadata = [] metadata =", "this one.) 
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config[\"interfaces\"][self._INTERFACE_NAME] ) # Save a", "of the instance to create. Valid identifiers are of the", "# from the client configuration. # (Ordinarily, these are the", "metadata=None, ): \"\"\" Lists all instances in the given project.", "of this client using the provided credentials file. Args: filename", "= client.update_instance(instance, field_mask) >>> >>> def callback(operation_future): ... # Handle", "parameter does not affect the return value. If page streaming", "retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets information about a particular", "method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request", "A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for", "= [] metadata = list(metadata) try: routing_header = [(\"instance.name\", instance.name)]", ") return iterator def get_instance_config( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "instance, and their performance may suffer. \"\"\" SERVICE_ADDRESS = \"spanner.googleapis.com:443\"", "Its state is ``CREATING``. Until completion of the returned operation:", "``CREATING``. Until completion of the returned operation: - Cancelling the", "resources in a page. filter_ (str): An expression for filtering", "the key used to # find the method configuration in", "set this if you're developing your own client library. client_options", "based on the newly-requested level. 
Until completion of the returned", "= service_account.Credentials.from_service_account_file(filename) kwargs[\"credentials\"] = credentials return cls(*args, **kwargs) from_service_account_json =", ">>> response = client.update_instance(instance, field_mask) >>> >>> def callback(operation_future): ...", "add retry and timeout logic. if \"list_instance_configs\" not in self._inner_api_calls:", "maximum number of resources in a page. filter_ (str): An", "the operation documentation for the appropriate value for this field.", "from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc from google.iam.v1 import iam_policy_pb2 from google.iam.v1", "# Raise deprecation warnings for things we want to go", ") return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata, ) def update_instance(", "this field. permissions (list[str]): The set of permissions to check", "process element ... pass Args: parent (str): Required. The name", "to pass to the constructor. Returns: InstanceAdminClient: The constructed client.", "import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.gapic_v1.routing_header", "# TODO: Initialize `instance_id`: >>> instance_id = '' >>> >>>", "type(client_options) == dict: client_options = google.api_core.client_options.from_dict( client_options ) if client_options.api_endpoint:", "may obtain a copy of the License at # #", "the service.\"\"\" # The name of the interface for this", ") request = spanner_instance_admin_pb2.ListInstanceConfigsRequest( parent=parent, page_size=page_size ) if metadata is", "``metadata`` field type is ``CreateInstanceMetadata``. The ``response`` field type is", "create. 
Valid identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and must", "credentials=credentials, default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport, address=api_endpoint, ) else: if credentials: raise ValueError( \"Received", "about a particular instance configuration. Example: >>> from google.cloud import", "set. Authorization requires ``spanner.instances.getIamPolicy`` on ``resource``. Example: >>> from google.cloud", "delete, modify and list instances. Instances are dedicated Cloud Spanner", "metadata=metadata, ), request=request, items_field=\"instances\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def", ">>> response = client.create_instance(parent, instance_id, instance) >>> >>> def callback(operation_future):", "to retry requests. If ``None`` is specified, requests will be", "address of the service.\"\"\" # The name of the interface", "requested levels). - All newly-reserved resources are available for serving", "may not use this file except in compliance with the", "updating the instance. If the named instance does not exist,", "... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> #", "request, retry=retry, timeout=timeout, metadata=metadata ) def list_instances( self, parent, page_size=None,", "routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"set_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata )", "``READY``. The returned ``long-running operation`` will have a name of", "instance. Values are of the form ``projects/<project>``. 
instance_id (str): Required.", "self._inner_api_calls[\"get_instance_config\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def list_instances( self, parent,", ") from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2 from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc from", "this file except in compliance with the License. # You", ">>> # TODO: Initialize `instance`: >>> instance = {} >>>", "protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used to", "= pkg_resources.get_distribution(\"google-cloud-spanner\").version class InstanceAdminClient(object): \"\"\" Cloud Spanner Instance Admin API", "give the pre-request resource levels. Upon completion of the returned", "name of a label Some examples of using filters are:", "is performed per- resource, this parameter does not affect the", "levels). - Databases can be created in the instance. -", "type is ``CreateInstanceMetadata``. The ``response`` field type is ``Instance``, if", "request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) if metadata is None: metadata =", "as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object", "include the instance name. Otherwise, only fields mentioned in ``field_mask``", "instance. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client", "options for each method. If not specified, the default configuration", "instance to create. The name may be omitted, but if", "`IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. retry (Optional[google.api_core.retry.Retry]): A retry object used to", "being erased accidentally by clients that do not know about", "transport is responsible for handling serialization and # deserialization and", "Equivalent to above. 
- ``labels.env:*`` --> The instance has the", "# # Licensed under the Apache License, Version 2.0 (the", "self, resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Sets the", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "*all of its databases* immediately and irrevocably disappear from the", "\"list_instance_configs\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instance_configs, default_retry=self._method_configs[\"ListInstanceConfigs\"].retry, default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout, client_info=self._client_info, ) request", "(Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask specifying which fields in ``Instance``", "= client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]') >>> >>> response = client.get_instance_config(name) Args: name", "self._inner_api_calls[ \"delete_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs[\"DeleteInstance\"].retry, default_timeout=self._method_configs[\"DeleteInstance\"].timeout, client_info=self._client_info, )", "result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle", "try: routing_header = [(\"resource\", resource)] except AttributeError: pass else: routing_metadata", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs[\"GetIamPolicy\"].retry, default_timeout=self._method_configs[\"GetIamPolicy\"].timeout, client_info=self._client_info, ) request =", "credentials=None, client_config=None, client_info=None, client_options=None, ): \"\"\"Constructor. 
Args: transport (Union[~.InstanceAdminGrpcTransport, Callable[[~.Credentials,", "= from_service_account_file @classmethod def instance_path(cls, project, instance): \"\"\"Return a fully-qualified", "retry=retry, timeout=timeout, metadata=metadata ) def list_instances( self, parent, page_size=None, filter_=None,", "is limited to a few 10s of KB. An empty", "timeout logic. if \"get_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"get_iam_policy\" ]", "project=project, instance=instance, ) @classmethod def instance_config_path(cls, project, instance_config): \"\"\"Return a", "field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask specifying which fields in", "is mutually exclusive with ``credentials``; providing both will raise an", "client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent", "retry, # timeout, and the like. self._inner_api_calls = {} #", "for all successfully-allocated resources (some types may have lower than", "set_iam_policy( self, resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Sets", "permissions (list[str]): The set of permissions to check for the", ">>> response = client.get_iam_policy(resource) Args: resource (str): REQUIRED: The resource", "are located (e.g., US-central, Europe). Configurations are created by Google", "``GetPolicyOptions`` object for specifying options to ``GetIamPolicy``. This field is", "= client.get_instance_config(name) Args: name (str): Required. 
The name of the", "\"delete_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"delete_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance,", "instance_config_path(cls, project, instance_config): \"\"\"Return a fully-qualified instance_config string.\"\"\" return google.api_core.path_template.expand(", "does not exist, returns ``NOT_FOUND``. Immediately upon completion of this", "page streaming is performed per- resource, this parameter does not", "the client_config dictionary. _INTERFACE_NAME = \"google.spanner.admin.instance.v1.InstanceAdmin\" @classmethod def from_service_account_file(cls, filename,", "of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata", ">>> def callback(operation_future): ... # Handle result. ... result =", "name of the project for which a list of instances", "[] metadata = list(metadata) try: routing_header = [(\"parent\", parent)] except", "in the instance. - The instance's allocated resource levels are", "dictionary. _INTERFACE_NAME = \"google.spanner.admin.instance.v1.InstanceAdmin\" @classmethod def from_service_account_file(cls, filename, *args, **kwargs):", "\"\"\" Creates an instance and begins preparing it to begin", "the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]):", "--> Equivalent to above. - ``labels.env:*`` --> The instance has", "client.get_instance(name) Args: name (str): Required. 
The name of the requested", "message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry", "\"create_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"create_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance,", "instance resource. Returns an empty policy if an instance exists", "a \"configuration\", which dictates where the serving resources for the", "or implied. # See the License for the specific language", "Billing begins for all successfully-allocated resources (some types may have", "the form ``projects/<project>``. page_size (int): The maximum number of resources", "= [] metadata = list(metadata) try: routing_header = [(\"parent\", parent)]", "_INTERFACE_NAME = \"google.spanner.admin.instance.v1.InstanceAdmin\" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): \"\"\"Creates", "if an instance exists but does not have a policy", ">>> # Handle metadata. >>> metadata = response.metadata() Args: instance", "Args: name (str): Required. The name of the requested instance.", "the API. The returned ``long-running operation`` will have a name", "the API. All data in the databases is permanently deleted.", "above. - ``NAME:howl`` --> Equivalent to above. - ``labels.env:*`` -->", "resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Sets the access", "the form ``projects/<project>/instanceConfigs/<config>``. 
retry (Optional[google.api_core.retry.Retry]): A retry object used to", ">>> resource = '' >>> >>> # TODO: Initialize `policy`:", "a fully-qualified project string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}\", project=project ) def", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, default_retry=self._method_configs[\"TestIamPermissions\"].retry, default_timeout=self._method_configs[\"TestIamPermissions\"].timeout, client_info=self._client_info, ) request =", "retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If", "create, delete, modify and list instances. Instances are dedicated Cloud", "``resource``. Permissions with wildcards (such as '*' or 'storage.*') are", "most of the instance resources, fewer resources are available for", "contained in the underlying API response. If page streaming is", "client_options (Union[dict, google.api_core.client_options.ClientOptions]): Client options used to set user options", "to a retryable error and retry attempts failed. ValueError: If", "), request=request, items_field=\"instance_configs\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def get_instance_config(", "API The Cloud Spanner Instance Admin API can be used", "google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs[\"UpdateInstance\"].retry, default_timeout=self._method_configs[\"UpdateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance,", "= google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instance_configs\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request,", "- For resource types for which a decrease in the", "Equivalent to above. - ``NAME:howl`` --> Equivalent to above. 
-", "client_config is not None: warnings.warn( \"The `client_config` argument is deprecated.\",", "mentioned in ``field_mask`` need be included. If a dict is", "``None`` is specified, requests will be retried using a default", "api_endpoint = client_options.api_endpoint # Instantiate the transport. # The transport", ":class:`~google.cloud.spanner_admin_instance_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests.", "modify the instance are rejected. Upon completion of the returned", "import enums from google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config from google.cloud.spanner_admin_instance_v1.gapic.transports import (", "google.api_core.gapic_v1.config.parse_method_configs( client_config[\"interfaces\"][self._INTERFACE_NAME] ) # Save a dictionary of cached API", "metadata=None, ): \"\"\" Updates an instance, and begins allocating or", "about them. If a dict is provided, it must be", "instance configuration. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>>", "instance_id = '' >>> >>> # TODO: Initialize `instance`: >>>", "The name of the requested instance. Values are of the", "instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason.", "routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls[\"delete_instance\"]( request, retry=retry, timeout=timeout,", "to add retry and timeout logic. if \"get_iam_policy\" not in", "additional per-database or per-operation charges for use of the instance", "call functions. 
# These are the actual callables which invoke", ">>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_path('[PROJECT]', '[INSTANCE]')", "request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def get_instance( self, name, field_mask=None,", "from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION =", "returned operation: - Cancelling the operation sets its metadata's ``cancel_time``,", ">>> >>> # Iterate over all results >>> for element", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs[\"DeleteInstance\"].retry, default_timeout=self._method_configs[\"DeleteInstance\"].timeout, client_info=self._client_info, ) request =", "list_instance_configs( self, parent, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists", "``resource``. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client", "\"google.spanner.admin.instance.v1.InstanceAdmin\" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): \"\"\"Creates an instance", "The client info used to send a user-agent string along", "element ... pass Args: parent (str): Required. The name of", ") return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, ) def list_instance_configs(", "Raise deprecation warnings for things we want to go away.", "omitted, but if specified must be ``<parent>/instances/<instance_id>``. If a dict", "`field_mask`: >>> field_mask = {} >>> >>> response = client.update_instance(instance,", "no allocated resources. Its state is ``CREATING``. 
Until completion of", "= client.set_iam_policy(resource, policy) Args: resource (str): REQUIRED: The resource for", "make calls. This argument is mutually exclusive with ``credentials``; providing", "name contains \"howl\" and it has the label \"env\" with", "= iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions ) if metadata is None: metadata", "seconds, to wait for the request to complete. Note that", "\"The `client_config` argument is deprecated.\", PendingDeprecationWarning, stacklevel=2, ) else: client_config", "completion of this request: - The instance is readable via", "name of the format ``<instance_name>/operations/<operation_id>`` and can be used to", ">>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `instance_id`:", "- ``labels.env:dev`` --> The instance has the label \"env\" and", "type is ``Instance``, if successful. Example: >>> from google.cloud import", "filter_ (str): An expression for filtering the results of the", "a name. - ``name:Howl`` --> The instance's name contains the", ">>> >>> response = client.get_instance_config(name) Args: name (str): Required. The", "databases is permanently deleted. Example: >>> from google.cloud import spanner_admin_instance_v1", "routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_iam_policy\"]( request, retry=retry,", "attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to", "configuration. timeout (Optional[float]): The amount of time, in seconds, to", "- ``name:HOWL`` --> Equivalent to above. - ``NAME:howl`` --> Equivalent", "performed per-page, this determines the maximum number of resources in", "instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to update, which must", "wildcards (such as '*' or 'storage.*') are not allowed. 
For", "\"`transport` instead.\", PendingDeprecationWarning, stacklevel=2, ) api_endpoint = self.SERVICE_ADDRESS if client_options:", "client.list_instances(parent).pages: ... for element in page: ... # process element", "the label \"env\" with its value containing \"dev\". retry (Optional[google.api_core.retry.Retry]):", "name (str): Required. The name of the instance to be", "Required. The instance to create. The name may be omitted,", "\"dev\". - ``name:howl labels.env:dev`` --> The instance's name contains \"howl\"", "self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets information about", "to the service. If none are specified, the client will", "\"configuration\", which dictates where the serving resources for the Cloud", "Generally, you only need to set this if you're developing", "client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> response = client.get_instance(name) Args: name (str):", "resource. Attempting this RPC on a non-existent Cloud Spanner instance", "from google.cloud import spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>>", "timeout (Optional[float]): The amount of time, in seconds, to wait", ">>> name = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> client.delete_instance(name) Args: name", "parent, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists the supported", "it terminates with a ``CANCELLED`` status. - All other attempts", "subset of ``Instance`` fields that should be returned. 
If absent,", "succeed at undoing all resource changes, after which point it", "The instance's allocated resource levels are readable via the API.", "default_retry=self._method_configs[\"CreateInstance\"].retry, default_timeout=self._method_configs[\"CreateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.CreateInstanceRequest( parent=parent, instance_id=instance_id, instance=instance", "an instance exists but does not have a policy set.", "Valid identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and must be", "requests. If ``None`` is specified, requests will be retried using", "All other attempts to modify the instance are rejected. Upon", "as '*' or 'storage.*') are not allowed. For more information", "to send a user-agent string along with API requests. If", "wrapped with `wrap_method` to add retry, # timeout, and the", "method to add retry and timeout logic. if \"delete_instance\" not", "form ``projects/<project>/instances/<instance>`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry", "to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If", "= _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out the default", "default_retry=self._method_configs[\"SetIamPolicy\"].retry, default_timeout=self._method_configs[\"SetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) if metadata", "= spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>>", "provided to the method. Returns: A :class:`~google.api_core.operation.Operation` instance. Raises: google.api_core.exceptions.GoogleAPICallError:", "instances. 
Instances are dedicated Cloud Spanner serving and storage resources", "= self.SERVICE_ADDRESS if client_options: if type(client_options) == dict: client_options =", "spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> client.delete_instance(name)", "= client.create_instance(parent, instance_id, instance) >>> >>> def callback(operation_future): ... #", "unreadable via the API. - The instance can be deleted.", "Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry", "be used by Cloud Spanner databases. Each instance has a", "in the `*_config.py` # file next to this one.) self._method_configs", ") metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instance_configs\"], retry=retry, timeout=timeout,", "the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask", "an instance of this client using the provided credentials file.", "client.project_path('[PROJECT]') >>> >>> # Iterate over all results >>> for", "Iterate over all results >>> for element in client.list_instance_configs(parent): ...", "Projects) might reject them. If a dict is provided, it", "method to add retry and timeout logic. if \"list_instances\" not", "info will be used. Generally, you only need to set", "protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to", "... # process element ... pass >>> >>> >>> #", "status. - All other attempts to modify the instance are", "``CANCELLED`` status. - All other attempts to modify the instance", "type is ``UpdateInstanceMetadata``. 
The ``response`` field type is ``Instance``, if", "lot of requests and consumes most of the instance resources,", "for each method. If not specified, the default configuration is", "the caller. If the named instance already exists, ``CreateInstance`` returns", "for this field. policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The complete policy", "options on the client. API Endpoint should be set through", "routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instances\"], retry=retry,", "permissions that the caller has on the specified instance resource.", "a dictionary of cached API call functions. # These are", "Endpoint should be set through client_options. \"\"\" # Raise deprecation", "pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator =", "Handle metadata. >>> metadata = response.metadata() Args: instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]):", "additional network bandwidth charges). 
Instances offer isolation: problems with databases", "\"update_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs[\"UpdateInstance\"].retry, default_timeout=self._method_configs[\"UpdateInstance\"].timeout, client_info=self._client_info, ) request", "type], ~.InstanceAdminGrpcTransport]): A transport instance, responsible for actually making the", "protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to", "copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # #", "Google LLC # # Licensed under the Apache License, Version", "resource (str): REQUIRED: The resource for which the policy detail", "self._client_info = client_info # Parse out the default settings for", "the pages of the response using its `pages` property. Raises:", "the named instance does not exist, returns ``NOT_FOUND``. Immediately upon", "a few 10s of KB. An empty policy is a", "in writing, software # distributed under the License is distributed", "instances is requested. Values are of the form ``projects/<project>``. page_size", "with wildcards (such as '*' or 'storage.*') are not allowed.", "assigned by the caller. If the named instance already exists,", "operation`` can be used to track the progress of updating", "of updating the instance. If the named instance does not", "first argument and the default transport class as the second", "of the instance's reserved resources. Soon afterward: - The instance", "``Instance`` fields that should be returned. If absent, all ``Instance``", "response = client.get_instance(name) Args: name (str): Required. 
The name of", "label \"env\" and the value of the label contains the", ">>> # TODO: Initialize `instance_id`: >>> instance_id = '' >>>", "a non-existent Cloud Spanner instance resource will result in a", "google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.operation import", "methods, wrapped with `wrap_method` to add retry, # timeout, and", "= client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> client.delete_instance(name) Args: name (str): Required.", "The instance name is assigned by the caller. If the", "be deleted. - All other attempts to modify the instance", "and # limitations under the License. \"\"\"Accesses the google.spanner.admin.instance.v1 InstanceAdmin", "returns ``ALREADY_EXISTS``. Immediately upon completion of this request: - The", "and storage resources to be used by Cloud Spanner databases.", "to their pre-request values. The operation is guaranteed to succeed", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "instance already exists, ``CreateInstance`` returns ``ALREADY_EXISTS``. Immediately upon completion of", "if \"get_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance\" ] = google.api_core.gapic_v1.method.wrap_method(", "License, Version 2.0 (the \"License\"); # you may not use", "client_options.api_endpoint: api_endpoint = client_options.api_endpoint # Instantiate the transport. 
# The", "one page at a time >>> for page in client.list_instances(parent).pages:", "The name of the project for which a list of", "retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists all instances in the", "resource, options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets the access", "request = spanner_instance_admin_pb2.CreateInstanceRequest( parent=parent, instance_id=instance_id, instance=instance ) if metadata is", "must always include the instance name. Otherwise, only fields mentioned", "retry object used to retry requests. If ``None`` is specified,", "exclusive with ``credentials``; providing both will raise an exception. credentials", "Cloud IAM. If a dict is provided, it must be", "metadata=None, ): \"\"\" Gets information about a particular instance. Example:", "def instance_config_path(cls, project, instance_config): \"\"\"Return a fully-qualified instance_config string.\"\"\" return", "of the service.\"\"\" # The name of the interface for", "client. API Endpoint should be set through client_options. \"\"\" #", "``name:HOWL`` --> Equivalent to above. - ``NAME:howl`` --> Equivalent to", "If field_mask is present, specifies the subset of ``Instance`` fields", "(Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The complete policy to be applied to", "to ascertain the credentials from the environment. This argument is", "[] metadata = list(metadata) try: routing_header = [(\"resource\", resource)] except", "be ``<parent>/instances/<instance_id>``. If a dict is provided, it must be", ">>> # Iterate over results one page at a time", "'*' or 'storage.*') are not allowed. 
For more information see", "# -*- coding: utf-8 -*- # # Copyright 2020 Google", "\"howl\" and it has the label \"env\" with its value", "Authorization requires ``spanner.instances.setIamPolicy`` on ``resource``. Example: >>> from google.cloud import", "``CreateInstance`` returns ``ALREADY_EXISTS``. Immediately upon completion of this request: -", "the containing Google Cloud Project. Otherwise returns an empty set", "= google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_iam_policy\"]( request, retry=retry, timeout=timeout,", "be retried using a default configuration. timeout (Optional[float]): The amount", "resource types for which a decrease in the instance's allocation", "response = client.test_iam_permissions(resource, permissions) Args: resource (str): REQUIRED: The resource", "google.oauth2 import service_account import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import", "the License for the specific language governing permissions and #", "routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None,", "mutually exclusive with ``credentials``; providing both will raise an exception.", "the label \"env\". - ``labels.env:dev`` --> The instance has the", "`policy`: >>> policy = {} >>> >>> response = client.set_iam_policy(resource,", "instance resource. 
Attempting this RPC on a non-existent Cloud Spanner", "RPC on a non-existent Cloud Spanner instance resource will result", "retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instance_configs\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return", "default_retry=self._method_configs[\"GetInstanceConfig\"].retry, default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name) if metadata is", "to create. The name may be omitted, but if specified", "instance to be deleted. Values are of the form ``projects/<project>/instances/<instance>``", "of the requested instance. Values are of the form ``projects/<project>/instances/<instance>``.", ":class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests.", "list(metadata) try: routing_header = [(\"resource\", resource)] except AttributeError: pass else:", "the API continues to give the pre-request resource levels. Upon", "next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config[\"interfaces\"][self._INTERFACE_NAME] ) #", "completion of the returned operation: - Billing for all successfully-allocated", "second argument. channel (grpc.Channel): DEPRECATED. A ``Channel`` instance through which", "method to add retry and timeout logic. if \"get_instance\" not", "and timeout for each RPC # from the client configuration.", "eligible for filtering are: - ``name`` - ``display_name`` - ``labels.key``", "retry attempts failed. ValueError: If the parameters are invalid. \"\"\"", "created in the instance. - The instance's allocated resource levels", "default info will be used. Generally, you only need to", "making the API calls. The default transport uses the gRPC", "create the instance. 
Values are of the form ``projects/<project>``. instance_id", "routing_header = [(\"resource\", resource)] except AttributeError: pass else: routing_metadata =", "The resource for which the policy is being requested. See", "empty set of permissions. Example: >>> from google.cloud import spanner_admin_instance_v1", "(Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to update, which must always", "cls(*args, **kwargs) from_service_account_json = from_service_account_file @classmethod def instance_path(cls, project, instance):", "self._inner_api_calls: self._inner_api_calls[ \"set_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs[\"SetIamPolicy\"].retry, default_timeout=self._method_configs[\"SetIamPolicy\"].timeout, client_info=self._client_info,", "client.get_iam_policy(resource) Args: resource (str): REQUIRED: The resource for which the", "timeout=timeout, metadata=metadata ) def test_iam_permissions( self, resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "operation is guaranteed to succeed at undoing all resource changes,", "a fully-qualified instance_config string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instanceConfigs/{instance_config}\", project=project, instance_config=instance_config, )", "Additional arguments to pass to the constructor. Returns: InstanceAdminClient: The", "\"\"\"Return a fully-qualified instance string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instances/{instance}\", project=project, instance=instance,", "sending data to the service. if transport: if callable(transport): self.transport", "requests will be retried using a default configuration. 
timeout (Optional[float]):", "= client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `instance_id`: >>> instance_id", "metadata=None, ): \"\"\" Gets information about a particular instance configuration.", "= {} # Service calls def create_instance( self, parent, instance_id,", "self.transport.get_iam_policy, default_retry=self._method_configs[\"GetIamPolicy\"].retry, default_timeout=self._method_configs[\"GetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.GetIamPolicyRequest( resource=resource, options=options_", ">>> >>> def callback(operation_future): ... # Handle result. ... result", "# TODO: Initialize `field_mask`: >>> field_mask = {} >>> >>>", "applied to the ``resource``. The size of the policy is", "``CreateInstanceMetadata``. The ``response`` field type is ``Instance``, if successful. Example:", "The transport is responsible for handling serialization and # deserialization", "for this client. This is the key used to #", "page at a time >>> for page in client.list_instance_configs(parent).pages: ...", "The instance to update, which must always include the instance", "identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between", "timeout=timeout, metadata=metadata ) def set_iam_policy( self, resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "# distributed under the License is distributed on an \"AS", "resource levels are readable via the API. The returned ``long-running", "particular instance configuration. Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "permissions = [] >>> >>> response = client.test_iam_permissions(resource, permissions) Args:", "Required. 
The instance to update, which must always include the", "# Unless required by applicable law or agreed to in", "policy = {} >>> >>> response = client.set_iam_policy(resource, policy) Args:", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists all instances in the given", "not exist, returns ``NOT_FOUND``. Immediately upon completion of this request:", "from google.cloud.spanner_admin_instance_v1.gapic import enums from google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config from google.cloud.spanner_admin_instance_v1.gapic.transports", ") return iterator def get_instance( self, name, field_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "A transport instance, responsible for actually making the API calls.", "for this field. permissions (list[str]): The set of permissions to", "def from_service_account_file(cls, filename, *args, **kwargs): \"\"\"Creates an instance of this", "import instance_admin_client_config from google.cloud.spanner_admin_instance_v1.gapic.transports import ( instance_admin_grpc_transport, ) from google.cloud.spanner_admin_instance_v1.proto", "= google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance_config, default_retry=self._method_configs[\"GetInstanceConfig\"].retry, default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name)", "def project_path(cls, project): \"\"\"Return a fully-qualified project string.\"\"\" return google.api_core.path_template.expand(", "to the method. Returns: A :class:`~google.api_core.operation.Operation` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If", "by Cloud Spanner databases. Each instance has a \"configuration\", which", "for an instance resource. 
Returns an empty policy if an", "A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for", "value for this field. options_ (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions``", "= google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs[\"SetIamPolicy\"].retry, default_timeout=self._method_configs[\"SetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.SetIamPolicyRequest(resource=resource,", "failed due to a retryable error and retry attempts failed.", "metadata that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance`", "results >>> for element in client.list_instances(parent): ... # process element", "need be included. If a dict is provided, it must", "the method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig`", "Spanner serving and storage resources to be used by Cloud", "be used to track the instance modification. The ``metadata`` field", "the ``resource``. The size of the policy is limited to", "instance configurations is requested. Values are of the form ``projects/<project>``.", "the Apache License, Version 2.0 (the \"License\"); # you may", "immediately and irrevocably disappear from the API. All data in", "functions. # These are the actual callables which invoke the", "by clients that do not know about them. If a", "instance_admin_client_config from google.cloud.spanner_admin_instance_v1.gapic.transports import ( instance_admin_grpc_transport, ) from google.cloud.spanner_admin_instance_v1.proto import", "logic. 
if \"test_iam_permissions\" not in self._inner_api_calls: self._inner_api_calls[ \"test_iam_permissions\" ] =", "timeout=timeout, metadata=metadata ) def get_iam_policy( self, resource, options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "(Union[dict, google.api_core.client_options.ClientOptions]): Client options used to set user options on", "instance_config): \"\"\"Return a fully-qualified instance_config string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instanceConfigs/{instance_config}\", project=project,", "should be returned. If absent, all ``Instance`` fields are returned.", "if client_options: if type(client_options) == dict: client_options = google.api_core.client_options.from_dict( client_options", "``spanner.instances.update`` permission on resource ``name``. Example: >>> from google.cloud import", ") request = iam_policy_pb2.GetIamPolicyRequest( resource=resource, options=options_ ) if metadata is", "billing is based on the instances that exist and their", "= \"spanner.googleapis.com:443\" \"\"\"The default address of the service.\"\"\" # The", "both a transport instance and \" \"credentials; these are mutually", "on resource ``name``. Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "can affect each other. For example, if one database in", "are available for other databases in that instance, and their", "Google Cloud Project. Otherwise returns an empty set of permissions.", "logic. if \"list_instances\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instances\" ] =", "transport: if callable(transport): self.transport = transport( credentials=credentials, default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport, address=api_endpoint, )", "iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances. 
You can also iterate over the", ":class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any", "number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry", "The fields eligible for filtering are: - ``name`` - ``display_name``", "not in self._inner_api_calls: self._inner_api_calls[ \"create_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance, default_retry=self._method_configs[\"CreateInstance\"].retry,", "at undoing all resource changes, after which point it terminates", ") metadata.append(routing_metadata) return self._inner_api_calls[\"set_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def", "but certain Cloud Platform services (such as Projects) might reject", "for filtering the results of the request. Filter rules are", "metadata=metadata ) def test_iam_permissions( self, resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "is deprecated.\", PendingDeprecationWarning, stacklevel=2, ) else: client_config = instance_admin_client_config.config if", "\"The `channel` argument is deprecated; use \" \"`transport` instead.\", PendingDeprecationWarning,", "fully-qualified project string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}\", project=project ) def __init__(", "failed. ValueError: If the parameters are invalid. \"\"\" # Wrap", "metadata = response.metadata() Args: parent (str): Required. The name of", "requested, billing is based on the newly-requested level. Until completion", "attach to requests. These credentials identify this application to the", "the instance. The ``metadata`` field type is ``CreateInstanceMetadata``. The ``response``", "Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. 
An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances. You", "spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata, ) def update_instance( self, instance, field_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "response = client.get_instance_config(name) Args: name (str): Required. The name of", "(Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to create. The name may", "google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance_config, default_retry=self._method_configs[\"GetInstanceConfig\"].retry, default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name) if", "add retry and timeout logic. if \"list_instances\" not in self._inner_api_calls:", "instance to ``transport``; doing so will raise an exception. client_config", "Otherwise returns an empty set of permissions. Example: >>> from", "page at a time >>> for page in client.list_instances(parent).pages: ...", "is based on the newly-requested level. Until completion of the", "disappear from the API. All data in the databases is", "if \"test_iam_permissions\" not in self._inner_api_calls: self._inner_api_calls[ \"test_iam_permissions\" ] = google.api_core.gapic_v1.method.wrap_method(", "`*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(", "client.list_instance_configs(parent): ... # process element ... 
pass >>> >>> >>>", "self._inner_api_calls[ \"create_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance, default_retry=self._method_configs[\"CreateInstance\"].retry, default_timeout=self._method_configs[\"CreateInstance\"].timeout, client_info=self._client_info, )", "- ``display_name`` - ``labels.key`` where key is the name of", "metadata.append(routing_metadata) operation = self._inner_api_calls[\"create_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) return", "A :class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances. You can", "instance to update, which must always include the instance name.", "``Instance`` from being erased accidentally by clients that do not", "credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @classmethod def instance_path(cls,", "be set through client_options. \"\"\" # Raise deprecation warnings for", "str]]]): Additional metadata that is provided to the method. Raises:", "to add retry and timeout logic. if \"get_instance_config\" not in", "metadata=None, ): \"\"\" Sets the access control policy on an", "which invoke the proper # transport methods, wrapped with `wrap_method`", ") request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name) if metadata is None: metadata =", "newly-reserved resources are available for serving the instance's tables. -", "time >>> for page in client.list_instances(parent).pages: ... 
for element in", "spanner_instance_admin_pb2.ListInstanceConfigsRequest( parent=parent, page_size=page_size ) if metadata is None: metadata =", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets information about a particular instance.", "an instance receives a lot of requests and consumes most", "via the API continues to give the pre-request resource levels.", "Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed", "of the instance to be deleted. Values are of the", "logic. if \"create_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"create_instance\" ] =", "message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry", "argument is mutually exclusive with providing a transport instance to", "The resource for which the policy detail is being requested.", "metadata=metadata ) def list_instances( self, parent, page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "method to add retry and timeout logic. if \"create_instance\" not", "in an instance receives a lot of requests and consumes", "has the label \"env\". - ``labels.env:dev`` --> The instance has", "sizes. After an instance exists, there are no additional per-database", "callable(transport): self.transport = transport( credentials=credentials, default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport, address=api_endpoint, ) else: if", "reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable", "Callable[[~.Credentials, type], ~.InstanceAdminGrpcTransport]): A transport instance, responsible for actually making", "from the environment. This argument is mutually exclusive with providing", "state becomes ``READY``. 
The returned ``long-running operation`` will have a", "empty policy is a valid policy but certain Cloud Platform", "results of the request. Filter rules are case insensitive. The", "on resource availability. Cloud Spanner billing is based on the", "we want to go away. if client_config is not None:", "same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A", "a page. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry", "requires ``spanner.instances.getIamPolicy`` on ``resource``. Example: >>> from google.cloud import spanner_admin_instance_v1", "policy detail is being requested. See the operation documentation for", "types may have lower than the requested levels). - Databases", "successfully-allocated resources (some types may have lower than the requested", "under the License is distributed on an \"AS IS\" BASIS,", "on an instance resource. Replaces any existing policy. Authorization requires", "of resources contained in the underlying API response. If page", "the project in which to create the instance. Values are", "all successfully-allocated resources (some types may have lower than the", "permission on the containing Google Cloud Project. Otherwise returns an", "not in self._inner_api_calls: self._inner_api_calls[ \"test_iam_permissions\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, default_retry=self._method_configs[\"TestIamPermissions\"].retry,", "same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A", "configurations is requested. Values are of the form ``projects/<project>``. 
page_size", "{} >>> >>> response = client.create_instance(parent, instance_id, instance) >>> >>>", "retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets the access control policy", "= google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs[\"GetIamPolicy\"].retry, default_timeout=self._method_configs[\"GetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.GetIamPolicyRequest(", "have a name of the format ``<instance_name>/operations/<operation_id>`` and can be", "in self._inner_api_calls: self._inner_api_calls[ \"create_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance, default_retry=self._method_configs[\"CreateInstance\"].retry, default_timeout=self._method_configs[\"CreateInstance\"].timeout,", "name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets information about a", "\"\"\" Gets information about a particular instance configuration. Example: >>>", "google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config from google.cloud.spanner_admin_instance_v1.gapic.transports import ( instance_admin_grpc_transport, ) from", "underlying API response. If page streaming is performed per- resource,", ">>> for page in client.list_instances(parent).pages: ... for element in page:", "**kwargs): \"\"\"Creates an instance of this client using the provided", "resource = '' >>> >>> # TODO: Initialize `policy`: >>>", "page in client.list_instances(parent).pages: ... for element in page: ... #", "string \"howl\". - ``name:HOWL`` --> Equivalent to above. 
- ``NAME:howl``", "you only need to set this if you're developing your", "A mask specifying which fields in ``Instance`` should be updated.", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instances, default_retry=self._method_configs[\"ListInstances\"].retry, default_timeout=self._method_configs[\"ListInstances\"].timeout, client_info=self._client_info, ) request =", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata", "a time >>> for page in client.list_instance_configs(parent).pages: ... for element", "'[INSTANCE_CONFIG]') >>> >>> response = client.get_instance_config(name) Args: name (str): Required.", "pass Args: parent (str): Required. The name of the project", "TODO: Initialize `instance`: >>> instance = {} >>> >>> response", "the License. \"\"\"Accesses the google.spanner.admin.instance.v1 InstanceAdmin API.\"\"\" import functools import", "'' >>> >>> # TODO: Initialize `policy`: >>> policy =", "of the requested instance configuration. Values are of the form", "value for this field. policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The complete", "name. - ``name:Howl`` --> The instance's name contains the string", "instance. The instance name is assigned by the caller. If", "The ``metadata`` field type is ``UpdateInstanceMetadata``. The ``response`` field type", "... # process element ... pass Args: parent (str): Required.", "- ``name:howl labels.env:dev`` --> The instance's name contains \"howl\" and", "is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request", "# TODO: Initialize `instance`: >>> instance = {} >>> >>>", "This field is only used by Cloud IAM. If a", "if client_options.api_endpoint: api_endpoint = client_options.api_endpoint # Instantiate the transport. #", ">>> # Handle metadata. 
>>> metadata = response.metadata() Args: parent", "filtering are: - ``name`` - ``display_name`` - ``labels.key`` where key", "routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instance_configs\"], retry=retry,", "is only used by Cloud IAM. If a dict is", "lower than the requested levels). - All newly-reserved resources are", "are specified, the client will attempt to ascertain the credentials", "to give the pre-request resource levels. Upon completion of the", "client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]') >>> >>> response = client.get_instance_config(name) Args: name (str):", "is present, specifies the subset of ``Instance`` fields that should", "timeout logic. if \"get_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance\" ]", "The ID of the instance to create. Valid identifiers are", "( instance_admin_grpc_transport, ) from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2 from google.cloud.spanner_admin_instance_v1.proto import", "import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.operation", "allocated resource levels are readable via the API. - The", "to ``GetIamPolicy``. This field is only used by Cloud IAM.", "resource will result in a NOT_FOUND error if the user", "of its databases* immediately and irrevocably disappear from the API.", "resource, this parameter does not affect the return value. If", "the constructor. 
kwargs: Additional arguments to pass to the constructor.", "(google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string", ") request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name) if metadata is None: metadata =", "message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry", "is guaranteed to succeed at undoing all resource changes, after", "for a given project. Example: >>> from google.cloud import spanner_admin_instance_v1", "Args: name (str): Required. The name of the instance to", "wait for the request to complete. Note that if ``retry``", ":class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask specifying which fields", "be deleted. Values are of the form ``projects/<project>/instances/<instance>`` retry (Optional[google.api_core.retry.Retry]):", "receives a lot of requests and consumes most of the", "from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import options_pb2 from google.iam.v1", "``name``. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client", "an instance. Immediately upon completion of the request: - Billing", "default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport, address=api_endpoint, ) else: if credentials: raise ValueError( \"Received both", "self, resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Returns permissions", "A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for", "instance_admin_grpc_transport, ) from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2 from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc", "policy) Args: resource (str): REQUIRED: The resource for which the", "for filtering are: - ``name`` - ``display_name`` - ``labels.key`` where", "ANY KIND, either express or implied. # See the License", "are no additional per-database or per-operation charges for use of", ">>> >>> client.delete_instance(name) Args: name (str): Required. The name of", "specified. See the operation documentation for the appropriate value for", "to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that", "the License. # You may obtain a copy of the", "# file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config[\"interfaces\"][self._INTERFACE_NAME]", "tables. - The instance's new resource levels are readable via", "self, parent, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists the", "spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize `resource`: >>> resource =", "must always be specified; this prevents any future fields in", "# See the License for the specific language governing permissions", "contains the string \"dev\". - ``name:howl labels.env:dev`` --> The instance's", "return google.api_core.path_template.expand( \"projects/{project}/instances/{instance}\", project=project, instance=instance, ) @classmethod def instance_config_path(cls, project,", "language governing permissions and # limitations under the License. 
\"\"\"Accesses", "``projects/<project>/instances/<instance>`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests.", ") request = iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions ) if metadata is", "a default configuration. timeout (Optional[float]): The amount of time, in", "Soon afterward: - The instance and *all of its databases*", "on ``resource``. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>>", "will not affect other instances. However, within an instance databases", "the new instance. The instance name is assigned by the", "key json file. args: Additional arguments to pass to the", "begins (some types may have lower than the requested levels).", "error and retry attempts failed. ValueError: If the parameters are", "method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request", "filter=filter_ ) if metadata is None: metadata = [] metadata", "client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceRequest( name=name, field_mask=field_mask ) if metadata", "options used to set user options on the client. API", "one instance will not affect other instances. However, within an", "= '' >>> >>> # TODO: Initialize `instance`: >>> instance", "label Some examples of using filters are: - ``name:*`` -->", "retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata,", "of KB. An empty policy is a valid policy but", ":class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances. You can also iterate over the pages of", "\"env\". 
- ``labels.env:dev`` --> The instance has the label \"env\"", "policy is a valid policy but certain Cloud Platform services", "becomes ``READY``. The returned ``long-running operation`` will have a name", "to track the progress of updating the instance. If the", "response_token_field=\"next_page_token\", ) return iterator def get_instance( self, name, field_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT,", "Authorization requires ``spanner.instances.getIamPolicy`` on ``resource``. Example: >>> from google.cloud import", "Returns an empty policy if an instance exists but does", "- Reading the instance via the API continues to give", "allowed. For more information see `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. retry (Optional[google.api_core.retry.Retry]):", "= spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance, field_mask=field_mask ) if metadata is None: metadata", "streaming is performed per- resource, this parameter does not affect", "self.transport = transport else: self.transport = instance_admin_grpc_transport.InstanceAdminGrpcTransport( address=api_endpoint, channel=channel, credentials=credentials", "returns ``NOT_FOUND``. Immediately upon completion of this request: - For", "[(\"resource\", resource)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header", "try: routing_header = [(\"instance.name\", instance.name)] except AttributeError: pass else: routing_metadata", "within an instance databases can affect each other. For example,", "user options on the client. API Endpoint should be set", "metadata = response.metadata() Args: instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance", "name (str): Required. 
The name of the requested instance configuration.", "if callable(transport): self.transport = transport( credentials=credentials, default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport, address=api_endpoint, ) else:", "a particular instance. Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "If the request failed due to a retryable error and", "fields are returned. If a dict is provided, it must", "(Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None``", "argument may also be a callable which returns a transport", "client_config[\"interfaces\"][self._INTERFACE_NAME] ) # Save a dictionary of cached API call", "instance (though there may be additional network bandwidth charges). Instances", "retry requests. If ``None`` is specified, requests will be retried", "will be sent the credentials as the first argument and", "client library. client_options (Union[dict, google.api_core.client_options.ClientOptions]): Client options used to set", "``Instance``, if successful. Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "google.cloud.spanner_admin_instance_v1.gapic.transports import ( instance_admin_grpc_transport, ) from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2 from", "field type is ``Instance``, if successful. Example: >>> from google.cloud", "Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed", "= [] >>> >>> response = client.test_iam_permissions(resource, permissions) Args: resource", "result in a NOT_FOUND error if the user has ``spanner.instances.list``", "iterator def get_instance( self, name, field_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "metadata = list(metadata) try: routing_header = [(\"parent\", parent)] except AttributeError:", "Gets information about a particular instance. Example: >>> from google.cloud", "have lower than the requested levels). - All newly-reserved resources", "prevents any future fields in ``Instance`` from being erased accidentally", ">>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize `resource`:", "value. If page streaming is performed per-page, this determines the", "{} # Service calls def create_instance( self, parent, instance_id, instance,", "# Service calls def create_instance( self, parent, instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT,", "if client_config is not None: warnings.warn( \"The `client_config` argument is", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance, default_retry=self._method_configs[\"CreateInstance\"].retry, default_timeout=self._method_configs[\"CreateInstance\"].timeout, client_info=self._client_info, ) request =", "``long-running operation`` can be used to track the progress of", "field_mask is present, specifies the subset of ``Instance`` fields that", "has ``spanner.instances.list`` permission on the containing Google Cloud Project. Otherwise", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "= list(metadata) try: routing_header = [(\"instance.name\", instance.name)] except AttributeError: pass", "preparing the new instance. The instance name is assigned by", "which must always include the instance name. 
Otherwise, only fields", "one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config[\"interfaces\"][self._INTERFACE_NAME] ) # Save a dictionary", "client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `instance_id`: >>> instance_id =", "exception. client_config (dict): DEPRECATED. A dictionary of call options for", "changes, after which point it terminates with a ``CANCELLED`` status.", "Permissions with wildcards (such as '*' or 'storage.*') are not", "the policy is being specified. See the operation documentation for", "of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object", "in self._inner_api_calls: self._inner_api_calls[ \"update_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs[\"UpdateInstance\"].retry, default_timeout=self._method_configs[\"UpdateInstance\"].timeout,", "writing, software # distributed under the License is distributed on", "the appropriate value for this field. options_ (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL:", "# process element ... pass Args: parent (str): Required. The", "a list of supported instance configurations is requested. Values are", ") def update_instance( self, instance, field_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "Spanner billing is based on the instances that exist and", "track the progress of preparing the new instance. The instance", "will have a name of the format ``<instance_name>/operations/<operation_id>`` and can", "and retry attempts failed. ValueError: If the parameters are invalid.", "per-database or per-operation charges for use of the instance (though", "resource changes, after which point it terminates with a ``CANCELLED``", "the instance immediately unreadable via the API. 
- The instance", "client_config = instance_admin_client_config.config if channel: warnings.warn( \"The `channel` argument is", "there may be additional network bandwidth charges). Instances offer isolation:", "timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional", "``name:Howl`` --> The instance's name contains the string \"howl\". -", "- Billing ceases for all of the instance's reserved resources.", "the defaults specified in the `*_config.py` # file next to", ") def list_instances( self, parent, page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "logic. if \"set_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"set_iam_policy\" ] =", "warnings from google.oauth2 import service_account import google.api_core.client_options import google.api_core.gapic_v1.client_info import", "an instance exists, there are no additional per-database or per-operation", "set of permissions to check for the ``resource``. Permissions with", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"update_instance\"]( request, retry=retry, timeout=timeout,", "which a decrease in the instance's allocation has been requested,", "default_timeout=self._method_configs[\"UpdateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance, field_mask=field_mask ) if", "and timeout logic. if \"create_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"create_instance\"", "routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"update_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata", "under the License. 
\"\"\"Accesses the google.spanner.admin.instance.v1 InstanceAdmin API.\"\"\" import functools", "limitations under the License. \"\"\"Accesses the google.spanner.admin.instance.v1 InstanceAdmin API.\"\"\" import", "Cloud Spanner instance are located (e.g., US-central, Europe). Configurations are", "be applied to the ``resource``. The size of the policy", "All data in the databases is permanently deleted. Example: >>>", "API response. If page streaming is performed per- resource, this", "is provided to the method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. An", "resource levels are readable via the API. - The instance's", "of call options for each method. If not specified, the", "metadata's ``cancel_time``, and begins restoring resources to their pre-request values.", "(list[str]): The set of permissions to check for the ``resource``.", ">>> instance_id = '' >>> >>> # TODO: Initialize `instance`:", "\"projects/{project}\", project=project ) def __init__( self, transport=None, channel=None, credentials=None, client_config=None,", "~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask specifying which fields in ``Instance`` should", "providing both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization", "examples of using filters are: - ``name:*`` --> The instance", "existing policy. Authorization requires ``spanner.instances.setIamPolicy`` on ``resource``. Example: >>> from", "length. instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to create. The", "to create. Valid identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and", "The name may be omitted, but if specified must be", "levels are readable via the API. The returned ``long-running operation``", "the form ``projects/<project>/instances/<instance>`` retry (Optional[google.api_core.retry.Retry]): A retry object used to", "levels. 
Upon completion of the returned operation: - Billing begins", "page_size=page_size ) if metadata is None: metadata = [] metadata", "raise an exception. client_config (dict): DEPRECATED. A dictionary of call", "empty policy if an instance exists but does not have", "Args: filename (str): The path to the service account private", "it to begin serving. The returned ``long-running operation`` can be", "Alternatively: >>> >>> # Iterate over results one page at", "request = iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions ) if metadata is None:", "google.api_core.operations_v1 import google.api_core.page_iterator import google.api_core.path_template import grpc from google.cloud.spanner_admin_instance_v1.gapic import", "for element in client.list_instances(parent): ... # process element ... pass", "know about them. If a dict is provided, it must", "each method. If not specified, the default configuration is used.", "are mutually exclusive.\" ) self.transport = transport else: self.transport =", "client.test_iam_permissions(resource, permissions) Args: resource (str): REQUIRED: The resource for which", "will be retried using a default configuration. timeout (Optional[float]): The", "default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used", "\"dev\". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests.", "__init__( self, transport=None, channel=None, credentials=None, client_config=None, client_info=None, client_options=None, ): \"\"\"Constructor.", "retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Deletes an instance. Immediately upon", "The name of the project in which to create the", "*args, **kwargs): \"\"\"Creates an instance of this client using the", "used to send a user-agent string along with API requests.", "will raise an exception. 
credentials (google.auth.credentials.Credentials): The authorization credentials to", "client.set_iam_policy(resource, policy) Args: resource (str): REQUIRED: The resource for which", "): \"\"\"Constructor. Args: transport (Union[~.InstanceAdminGrpcTransport, Callable[[~.Credentials, type], ~.InstanceAdminGrpcTransport]): A transport", "returned ``long-running operation`` will have a name of the format", "database in an instance receives a lot of requests and", "one page at a time >>> for page in client.list_instance_configs(parent).pages:", "are readable via the API. The returned ``long-running operation`` will", ") metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance_config\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def", "(str): Required. The ID of the instance to create. Valid", "(str): Required. The name of the requested instance. Values are", "the requested levels). - All newly-reserved resources are available for", "name (str): Required. The name of the requested instance. Values", "self.transport.get_instance, default_retry=self._method_configs[\"GetInstance\"].retry, default_timeout=self._method_configs[\"GetInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceRequest( name=name, field_mask=field_mask", "fields mentioned in ``field_mask`` need be included. If a dict", "metadata.append(routing_metadata) self._inner_api_calls[\"delete_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def set_iam_policy( self,", "The instance's name contains the string \"howl\". - ``name:HOWL`` -->", "same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A", "from the client configuration. # (Ordinarily, these are the defaults", "appropriate value for this field. 
policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The", "which point it terminates with a ``CANCELLED`` status. - All", "the client. API Endpoint should be set through client_options. \"\"\"", "of supported instance configurations is requested. Values are of the", "deserialization and actually sending data to the service. if transport:", "maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A", "be returned. If absent, all ``Instance`` fields are returned. If", "metadata=metadata, ), request=request, items_field=\"instance_configs\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def", "functools import pkg_resources import warnings from google.oauth2 import service_account import", "element ... pass >>> >>> >>> # Alternatively: >>> >>>", ">>> >>> # TODO: Initialize `field_mask`: >>> field_mask = {}", "add retry and timeout logic. if \"get_instance_config\" not in self._inner_api_calls:", "deleted. Values are of the form ``projects/<project>/instances/<instance>`` retry (Optional[google.api_core.retry.Retry]): A", "if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( gapic_version=_GAPIC_LIBRARY_VERSION ) else:", "field_mask) >>> >>> def callback(operation_future): ... # Handle result. ...", "instance_path(cls, project, instance): \"\"\"Return a fully-qualified instance string.\"\"\" return google.api_core.path_template.expand(", "this field. options_ (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for", "and actually sending data to the service. 
if transport: if", ") request = spanner_instance_admin_pb2.CreateInstanceRequest( parent=parent, instance_id=instance_id, instance=instance ) if metadata", "iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) if metadata is None: metadata = [] metadata", "= list(metadata) try: routing_header = [(\"name\", name)] except AttributeError: pass", "be updated. The field mask must always be specified; this", "the returned operation: - Cancelling the operation renders the instance", "default configuration. timeout (Optional[float]): The amount of time, in seconds,", "user-agent string along with API requests. If ``None``, then default", "instance has the label \"env\". - ``labels.env:dev`` --> The instance", "to wait for the request to complete. Note that if", "metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instances\"], retry=retry, timeout=timeout, metadata=metadata,", "request, retry=retry, timeout=timeout, metadata=metadata ) def delete_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT,", "(though there may be additional network bandwidth charges). 
Instances offer", "list(metadata) try: routing_header = [(\"parent\", parent)] except AttributeError: pass else:", "where key is the name of a label Some examples", "from google.cloud.spanner_admin_instance_v1.gapic.transports import ( instance_admin_grpc_transport, ) from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2", "= google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instances\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request,", "instance): \"\"\"Return a fully-qualified instance string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instances/{instance}\", project=project,", "protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used to", "of cached API call functions. # These are the actual", "instance.name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header )", "[] metadata = list(metadata) try: routing_header = [(\"instance.name\", instance.name)] except", "if channel: warnings.warn( \"The `channel` argument is deprecated; use \"", "are rejected. - Reading the instance via the API continues", "= spanner_instance_admin_pb2.ListInstanceConfigsRequest( parent=parent, page_size=page_size ) if metadata is None: metadata", "and can be used to track the instance modification. The", "is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance. Raises:", "of the form ``projects/<project>/instances/<instance>``. 
field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If field_mask is", "import ( instance_admin_grpc_transport, ) from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2 from google.cloud.spanner_admin_instance_v1.proto", "limited to a few 10s of KB. An empty policy", ":class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances. You can also iterate over the pages of", "Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances. You", "client configuration. # (Ordinarily, these are the defaults specified in", "exist and their sizes. After an instance exists, there are", "and timeout logic. if \"test_iam_permissions\" not in self._inner_api_calls: self._inner_api_calls[ \"test_iam_permissions\"", "(Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If field_mask is present, specifies the subset of", "request: - For resource types for which a decrease in", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "(str): Required. The name of the project in which to", "This argument is mutually exclusive with ``credentials``; providing both will", "page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists the supported instance", ">>> instance = {} >>> >>> # TODO: Initialize `field_mask`:", "= instance_admin_client_config.config if channel: warnings.warn( \"The `channel` argument is deprecated;", "instance=instance, ) @classmethod def instance_config_path(cls, project, instance_config): \"\"\"Return a fully-qualified", "defaults specified in the `*_config.py` # file next to this", "rejected. 
Upon completion of the returned operation: - Billing for", "of the instance resources, fewer resources are available for other", "returned operation: - Billing begins for all successfully-allocated resources (some", "def list_instances( self, parent, page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstancesRequest( parent=parent, page_size=page_size, filter=filter_ ) if", ">>> response = client.test_iam_permissions(resource, permissions) Args: resource (str): REQUIRED: The", "suffer. \"\"\" SERVICE_ADDRESS = \"spanner.googleapis.com:443\" \"\"\"The default address of the", "method=functools.partial( self._inner_api_calls[\"list_instances\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instances\", request_token_field=\"page_token\", response_token_field=\"next_page_token\",", "self._inner_api_calls[\"get_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def delete_instance( self, name,", "is the key used to # find the method configuration", "Values are of the form ``projects/<project>``. instance_id (str): Required. The", "instance are rejected. Upon completion of the returned operation: -", "instance string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instances/{instance}\", project=project, instance=instance, ) @classmethod def", ") self.transport = transport else: self.transport = instance_admin_grpc_transport.InstanceAdminGrpcTransport( address=api_endpoint, channel=channel,", "class as the second argument. channel (grpc.Channel): DEPRECATED. A ``Channel``", "): \"\"\" Gets the access control policy for an instance", "supported instance configurations for a given project. 
Example: >>> from", "is not None: warnings.warn( \"The `client_config` argument is deprecated.\", PendingDeprecationWarning,", "parent, instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Creates an", "will raise an exception. client_config (dict): DEPRECATED. A dictionary of", "retry=retry, timeout=timeout, metadata=metadata ) def set_iam_policy( self, resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT,", "Callables will be sent the credentials as the first argument", "to add retry, # timeout, and the like. self._inner_api_calls =", "pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"set_iam_policy\"](", "in page: ... # process element ... pass Args: parent", "the supported instance configurations for a given project. Example: >>>", "add retry and timeout logic. if \"create_instance\" not in self._inner_api_calls:", "(str): The path to the service account private key json", "available for serving the instance's tables. - The instance's new", "and timeout logic. if \"get_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance\"", "permissions, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Returns permissions that the", "google.api_core.path_template.expand( \"projects/{project}\", project=project ) def __init__( self, transport=None, channel=None, credentials=None,", "are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2", "policy to be applied to the ``resource``. The size of", ") api_endpoint = self.SERVICE_ADDRESS if client_options: if type(client_options) == dict:", "that is provided to the method. 
Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance.", "gapic_version=_GAPIC_LIBRARY_VERSION ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info #", "The instance's new resource levels are readable via the API.", "requested instance. Values are of the form ``projects/<project>/instances/<instance>``. field_mask (Union[dict,", "= spanner_instance_admin_pb2.GetInstanceRequest( name=name, field_mask=field_mask ) if metadata is None: metadata", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "library. client_options (Union[dict, google.api_core.client_options.ClientOptions]): Client options used to set user", "the request failed due to a retryable error and retry", "google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import", "be of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy`", "self._inner_api_calls[ \"list_instances\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instances, default_retry=self._method_configs[\"ListInstances\"].retry, default_timeout=self._method_configs[\"ListInstances\"].timeout, client_info=self._client_info, )", "client.delete_instance(name) Args: name (str): Required. The name of the instance", "retry and timeout logic. if \"get_instance\" not in self._inner_api_calls: self._inner_api_calls[", "response. If page streaming is performed per- resource, this parameter", "to complete. Note that if ``retry`` is specified, the timeout", "timeout logic. if \"get_instance_config\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance_config\" ]", "instead.\", PendingDeprecationWarning, stacklevel=2, ) api_endpoint = self.SERVICE_ADDRESS if client_options: if", "# Parse out the default settings for retry and timeout", "the API. - The instance can be deleted. - All", "of instances is requested. 
Values are of the form ``projects/<project>``.", "``response`` field type is ``Instance``, if successful. Authorization requires ``spanner.instances.update``", "used to track creation of the instance. The ``metadata`` field", "metadata = list(metadata) try: routing_header = [(\"instance.name\", instance.name)] except AttributeError:", "to the service. if transport: if callable(transport): self.transport = transport(", "Returns: InstanceAdminClient: The constructed client. \"\"\" credentials = service_account.Credentials.from_service_account_file(filename) kwargs[\"credentials\"]", "no additional per-database or per-operation charges for use of the", "the transport. # The transport is responsible for handling serialization", "error if the user has ``spanner.instances.list`` permission on the containing", "and begins allocating or releasing resources as requested. The returned", "resources for the Cloud Spanner instance are located (e.g., US-central,", "the API. - The instance's state becomes ``READY``. The returned", ") request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) if metadata is None: metadata", "callback(operation_future): ... # Handle result. ... result = operation_future.result() >>>", "REQUIRED: The resource for which the policy detail is being", "\"get_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance,", "instance's allocated resource levels are readable via the API. -", "Returns: A :class:`~google.api_core.operation.Operation` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed", "credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These", "= response.metadata() Args: instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to", "to make calls. 
This argument is mutually exclusive with ``credentials``;", "request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name) if metadata is None: metadata = []", "- ``NAME:howl`` --> Equivalent to above. - ``labels.env:*`` --> The", "timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, )", ") def test_iam_permissions( self, resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "used to track the progress of updating the instance. If", "Spanner databases. Each instance has a \"configuration\", which dictates where", "A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for", "specific language governing permissions and # limitations under the License.", "``retry`` is specified, the timeout applies to each individual attempt.", "instance_admin_client_config.config if channel: warnings.warn( \"The `channel` argument is deprecated; use", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Deletes an instance. Immediately upon completion", "``<instance_name>/operations/<operation_id>`` and can be used to track creation of the", "to ``transport``; doing so will raise an exception. client_config (dict):", "authorization credentials to attach to requests. These credentials identify this", "is based on the instances that exist and their sizes.", "requested. The returned ``long-running operation`` can be used to track", "field type is ``Instance``, if successful. Authorization requires ``spanner.instances.update`` permission", "which the policy is being requested. See the operation documentation", "instance's new resource levels are readable via the API. 
The", "project string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}\", project=project ) def __init__( self,", "spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO:", "``projects/<project>``. page_size (int): The maximum number of resources contained in", "page_size=page_size, filter=filter_ ) if metadata is None: metadata = []", "returns an empty set of permissions. Example: >>> from google.cloud", "# Handle metadata. >>> metadata = response.metadata() Args: parent (str):", "calls. The default transport uses the gRPC protocol. This argument", "operation`` can be used to track the progress of preparing", "else: client_config = instance_admin_client_config.config if channel: warnings.warn( \"The `channel` argument", "google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, ) def list_instance_configs( self, parent,", "is None: metadata = [] metadata = list(metadata) try: routing_header", "future fields in ``Instance`` from being erased accidentally by clients", "metadata.append(routing_metadata) return self._inner_api_calls[\"set_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def get_iam_policy(", "form ``projects/<project>/instances/<instance>``. 
field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If field_mask is present, specifies", "resource=resource, options=options_ ) if metadata is None: metadata = []", "instance's name contains \"howl\" and it has the label \"env\"", "self.transport.set_iam_policy, default_retry=self._method_configs[\"SetIamPolicy\"].retry, default_timeout=self._method_configs[\"SetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) if", "# you may not use this file except in compliance", "of the label contains the string \"dev\". - ``name:howl labels.env:dev``", "invoke the proper # transport methods, wrapped with `wrap_method` to", "``cancel_time``, and begins restoring resources to their pre-request values. The", "the form ``projects/<project>``. instance_id (str): Required. The ID of the", "handling serialization and # deserialization and actually sending data to", "pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_iam_policy\"](", "request=request, items_field=\"instances\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def get_instance( self,", "form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry", "the method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance`", "constructed client. 
\"\"\" credentials = service_account.Credentials.from_service_account_file(filename) kwargs[\"credentials\"] = credentials return", ">>> >>> # Alternatively: >>> >>> # Iterate over results", "= client_info # Parse out the default settings for retry", "not in self._inner_api_calls: self._inner_api_calls[ \"set_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs[\"SetIamPolicy\"].retry,", "serving resources for the Cloud Spanner instance are located (e.g.,", "= google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs[\"UpdateInstance\"].retry, default_timeout=self._method_configs[\"UpdateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.UpdateInstanceRequest(", "method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances.", "Values are of the form ``projects/<project>/instances/<instance>`` retry (Optional[google.api_core.retry.Retry]): A retry", "can be used to track the progress of preparing the", "is ``Instance``, if successful. Authorization requires ``spanner.instances.update`` permission on resource", "routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"test_iam_permissions\"]( request, retry=retry,", "self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata, ) def update_instance( self, instance, field_mask, retry=google.api_core.gapic_v1.method.DEFAULT,", "as Projects) might reject them. 
If a dict is provided,", "empty_pb2 from google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(\"google-cloud-spanner\").version class InstanceAdminClient(object):", "label \"env\". - ``labels.env:dev`` --> The instance has the label", "spanner_admin_instance_v1.InstanceAdminClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # Iterate", "ascertain the credentials from the environment. This argument is mutually", "TODO: Initialize `policy`: >>> policy = {} >>> >>> response", "of the interface for this client. This is the key", "field. policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The complete policy to be", "client_options=None, ): \"\"\"Constructor. Args: transport (Union[~.InstanceAdminGrpcTransport, Callable[[~.Credentials, type], ~.InstanceAdminGrpcTransport]): A", "If the parameters are invalid. \"\"\" # Wrap the transport", "named instance already exists, ``CreateInstance`` returns ``ALREADY_EXISTS``. Immediately upon completion", "try: routing_header = [(\"parent\", parent)] except AttributeError: pass else: routing_metadata", "self._inner_api_calls[ \"list_instance_configs\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instance_configs, default_retry=self._method_configs[\"ListInstanceConfigs\"].retry, default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout, client_info=self._client_info, )", "Service calls def create_instance( self, parent, instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "is provided to the method. Returns: A :class:`~google.api_core.operation.Operation` instance. Raises:", "are case insensitive. 
The fields eligible for filtering are: -", "format ``<instance_name>/operations/<operation_id>`` and can be used to track the instance", "= spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name) if metadata is None: metadata = [] metadata", "== dict: client_options = google.api_core.client_options.from_dict( client_options ) if client_options.api_endpoint: api_endpoint", "instance of this client using the provided credentials file. Args:", "instance's name contains the string \"howl\". - ``name:HOWL`` --> Equivalent", "= [(\"instance.name\", instance.name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(", "logic. if \"delete_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"delete_instance\" ] =", "from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2 from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc from google.iam.v1", "used to track the progress of preparing the new instance.", "\"\"\" Lists the supported instance configurations for a given project.", "OPTIONAL: A ``GetPolicyOptions`` object for specifying options to ``GetIamPolicy``. This", "an instance databases can affect each other. For example, if", "the format ``<instance_name>/operations/<operation_id>`` and can be used to track creation", "- The instance and *all of its databases* immediately and", "the specified instance resource. Attempting this RPC on a non-existent", "characters in length. instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to", "): \"\"\" Deletes an instance. Immediately upon completion of the", "problems with databases in one instance will not affect other", "the policy is being requested. 
See the operation documentation for", "self, parent, page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists", "requests. These credentials identify this application to the service. If", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Sets the access control policy on", "transport method to add retry and timeout logic. if \"list_instances\"", "timeout logic. if \"list_instances\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instances\" ]", "the caller has on the specified instance resource. Attempting this", "constructor. Returns: InstanceAdminClient: The constructed client. \"\"\" credentials = service_account.Credentials.from_service_account_file(filename)", "logic. if \"list_instance_configs\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instance_configs\" ] =", ":class:`~google.cloud.spanner_admin_instance_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests.", "else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"set_iam_policy\"]( request,", "\"\"\" Updates an instance, and begins allocating or releasing resources", "under the Apache License, Version 2.0 (the \"License\"); # you", "progress of updating the instance. If the named instance does", "element in client.list_instance_configs(parent): ... # process element ... pass >>>", "value for this field. permissions (list[str]): The set of permissions", "Google based on resource availability. 
Cloud Spanner billing is based", "client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize `instance`: >>>", "else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance\"]( request,", "routing_header ) metadata.append(routing_metadata) self._inner_api_calls[\"delete_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def", ">>> for page in client.list_instance_configs(parent).pages: ... for element in page:", "instance resources, fewer resources are available for other databases in", "# Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback)", "any reason. google.api_core.exceptions.RetryError: If the request failed due to a", "name of the instance to be deleted. Values are of", "Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed", "to add retry and timeout logic. if \"list_instances\" not in", "documentation for the appropriate value for this field. options_ (Union[dict,", "client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize `resource`: >>>", "import grpc from google.cloud.spanner_admin_instance_v1.gapic import enums from google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config", "if \"update_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"update_instance\" ] = google.api_core.gapic_v1.method.wrap_method(", "that do not know about them. If a dict is", "= google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls[\"delete_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata", "method. 
Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request", "Filter rules are case insensitive. The fields eligible for filtering", "google.iam.v1 import policy_pb2 from google.longrunning import operations_pb2 from google.protobuf import", "request failed for any reason. google.api_core.exceptions.RetryError: If the request failed", "(such as Projects) might reject them. If a dict is", "else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_iam_policy\"]( request,", "instance exists but does not have a policy set. Authorization", "client_info # Parse out the default settings for retry and", "google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2 from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc from google.iam.v1 import", "control policy for an instance resource. Returns an empty policy", "go away. if client_config is not None: warnings.warn( \"The `client_config`", "a transport instance and \" \"credentials; these are mutually exclusive.\"", "An expression for filtering the results of the request. Filter", "the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]):", "the method configuration in the client_config dictionary. _INTERFACE_NAME = \"google.spanner.admin.instance.v1.InstanceAdmin\"", "# Copyright 2020 Google LLC # # Licensed under the", "project for which a list of supported instance configurations is", "``spanner.instances.list`` permission on the containing Google Cloud Project. Otherwise returns", "field type is ``UpdateInstanceMetadata``. 
The ``response`` field type is ``Instance``,", "mask must always be specified; this prevents any future fields", "metadata = [] metadata = list(metadata) try: routing_header = [(\"parent\",", "method configuration in the client_config dictionary. _INTERFACE_NAME = \"google.spanner.admin.instance.v1.InstanceAdmin\" @classmethod", ">>> >>> name = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> client.delete_instance(name) Args:", "): \"\"\" Returns permissions that the caller has on the", "returned operation: - Cancelling the operation renders the instance immediately", "instance has the label \"env\" and the value of the", "to modify the instance are rejected. - Reading the instance", "pkg_resources.get_distribution(\"google-cloud-spanner\").version class InstanceAdminClient(object): \"\"\" Cloud Spanner Instance Admin API The", "Billing ceases for all of the instance's reserved resources. Soon", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "args: Additional arguments to pass to the constructor. kwargs: Additional", "client.create_instance(parent, instance_id, instance) >>> >>> def callback(operation_future): ... # Handle", "form ``projects/<project>``. page_size (int): The maximum number of resources contained", "result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>>", "transport method to add retry and timeout logic. if \"list_instance_configs\"", "--> The instance has a name. 
- ``name:Howl`` --> The", ">>> policy = {} >>> >>> response = client.set_iam_policy(resource, policy)", "\"credentials; these are mutually exclusive.\" ) self.transport = transport else:", "= self._inner_api_calls[\"update_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation,", "Attempting this RPC on a non-existent Cloud Spanner instance resource", ">>> # Alternatively: >>> >>> # Iterate over results one", "def __init__( self, transport=None, channel=None, credentials=None, client_config=None, client_info=None, client_options=None, ):", "expression for filtering the results of the request. Filter rules", "self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Deletes an instance.", "releasing resources as requested. The returned ``long-running operation`` can be", "instance and \" \"credentials; these are mutually exclusive.\" ) self.transport", "and the default transport class as the second argument. channel", "a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 #", "new resource levels are readable via the API. The returned", "client_options. \"\"\" # Raise deprecation warnings for things we want", "google.api_core.path_template import grpc from google.cloud.spanner_admin_instance_v1.gapic import enums from google.cloud.spanner_admin_instance_v1.gapic import", "instance, responsible for actually making the API calls. The default", "= google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs[\"DeleteInstance\"].retry, default_timeout=self._method_configs[\"DeleteInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name)", "KB. An empty policy is a valid policy but certain", "other attempts to modify the instance are rejected. 
- Reading", "NOT_FOUND error if the user has ``spanner.instances.list`` permission on the", "same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]):", "time >>> for page in client.list_instance_configs(parent).pages: ... for element in", "REQUIRED: The complete policy to be applied to the ``resource``.", "= \"google.spanner.admin.instance.v1.InstanceAdmin\" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): \"\"\"Creates an", "'' >>> >>> # TODO: Initialize `permissions`: >>> permissions =", "performance may suffer. \"\"\" SERVICE_ADDRESS = \"spanner.googleapis.com:443\" \"\"\"The default address", "For more information see `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. retry (Optional[google.api_core.retry.Retry]): A", "size of the policy is limited to a few 10s", "do not know about them. If a dict is provided,", "google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs[\"SetIamPolicy\"].retry, default_timeout=self._method_configs[\"SetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)", ") def list_instance_configs( self, parent, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "grpc from google.cloud.spanner_admin_instance_v1.gapic import enums from google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config from", "requests and consumes most of the instance resources, fewer resources", "client.list_instance_configs(parent).pages: ... for element in page: ... # process element", "the Cloud Spanner instance are located (e.g., US-central, Europe). 
Configurations", "kwargs[\"credentials\"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @classmethod", "client=None, method=functools.partial( self._inner_api_calls[\"list_instances\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instances\", request_token_field=\"page_token\",", "For resource types for which a decrease in the instance's", "one database in an instance receives a lot of requests", "the gRPC protocol. This argument may also be a callable", "instance=instance ) if metadata is None: metadata = [] metadata", "project. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client", "\"get_instance_config\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance_config\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance_config,", "): \"\"\" Gets information about a particular instance configuration. Example:", "sent the credentials as the first argument and the default", ">>> instance = {} >>> >>> response = client.create_instance(parent, instance_id,", "= spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize `resource`: >>> resource", "transport instance, responsible for actually making the API calls. The", "= spanner_instance_admin_pb2.ListInstancesRequest( parent=parent, page_size=page_size, filter=filter_ ) if metadata is None:", "provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError:", "the client configuration. # (Ordinarily, these are the defaults specified", "for element in page: ... # process element ... pass", "name = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> client.delete_instance(name) Args: name (str):", "are of the form ``projects/<project>/instanceConfigs/<config>``. 
retry (Optional[google.api_core.retry.Retry]): A retry object", "= google.api_core.gapic_v1.method.wrap_method( self.transport.list_instances, default_retry=self._method_configs[\"ListInstances\"].retry, default_timeout=self._method_configs[\"ListInstances\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstancesRequest(", "from the API. All data in the databases is permanently", "= [(\"parent\", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(", "routing_header = [(\"instance.name\", instance.name)] except AttributeError: pass else: routing_metadata =", "detail is being requested. See the operation documentation for the", "level. Until completion of the returned operation: - Cancelling the", "Reading the instance via the API continues to give the", ">>> metadata = response.metadata() Args: parent (str): Required. The name", "[(\"instance.name\", instance.name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header", "[(\"parent\", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header", "operation sets its metadata's ``cancel_time``, and begins restoring resources to", "all ``Instance`` fields are returned. If a dict is provided,", ") def set_iam_policy( self, resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "[] >>> >>> response = client.test_iam_permissions(resource, permissions) Args: resource (str):", "exist, returns ``NOT_FOUND``. 
Immediately upon completion of this request: -", "``<instance_name>/operations/<operation_id>`` and can be used to track the instance modification.", "Upon completion of the returned operation: - Billing begins for", "= google.api_core.client_options.from_dict( client_options ) if client_options.api_endpoint: api_endpoint = client_options.api_endpoint #", "metadata = list(metadata) try: routing_header = [(\"name\", name)] except AttributeError:", "timeout logic. if \"set_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"set_iam_policy\" ]", "data to the service. if transport: if callable(transport): self.transport =", "google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, default_retry=self._method_configs[\"TestIamPermissions\"].retry, default_timeout=self._method_configs[\"TestIamPermissions\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.TestIamPermissionsRequest( resource=resource,", "string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instances/{instance}\", project=project, instance=instance, ) @classmethod def instance_config_path(cls,", "deprecation warnings for things we want to go away. if", ">>> >>> response = client.update_instance(instance, field_mask) >>> >>> def callback(operation_future):", "instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Creates an instance", "Instances offer isolation: problems with databases in one instance will", "are the actual callables which invoke the proper # transport", "the request to complete. Note that if ``retry`` is specified,", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "The name of the interface for this client. This is", "things we want to go away. if client_config is not", "availability. 
Cloud Spanner billing is based on the instances that", "- All newly-reserved resources are available for serving the instance's", "License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "policy but certain Cloud Platform services (such as Projects) might", "(grpc.Channel): DEPRECATED. A ``Channel`` instance through which to make calls.", "the underlying API response. If page streaming is performed per-", "The returned ``long-running operation`` can be used to track the", "argument is deprecated; use \" \"`transport` instead.\", PendingDeprecationWarning, stacklevel=2, )", "an instance resource. Replaces any existing policy. Authorization requires ``spanner.instances.setIamPolicy``", "attempts failed. ValueError: If the parameters are invalid. \"\"\" #", "the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info", "field_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Updates an instance, and", "a given project. Example: >>> from google.cloud import spanner_admin_instance_v1 >>>", "reserved resources. Soon afterward: - The instance and *all of", "API. All data in the databases is permanently deleted. Example:", "and timeout logic. if \"get_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"get_iam_policy\"", "iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions ) if metadata is None: metadata =", "readable via the API. 
The returned ``long-running operation`` will have", "enums from google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config from google.cloud.spanner_admin_instance_v1.gapic.transports import ( instance_admin_grpc_transport,", "self.transport.list_instances, default_retry=self._method_configs[\"ListInstances\"].retry, default_timeout=self._method_configs[\"ListInstances\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstancesRequest( parent=parent, page_size=page_size,", "of a label Some examples of using filters are: -", ":class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests.", "transport method to add retry and timeout logic. if \"set_iam_policy\"", "google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.operation import google.api_core.operations_v1 import google.api_core.page_iterator import", "will be used. Generally, you only need to set this", "page. filter_ (str): An expression for filtering the results of", "(such as '*' or 'storage.*') are not allowed. For more", "Configurations are created by Google based on resource availability. Cloud", "Cloud Spanner billing is based on the instances that exist", "serving the instance's tables. - The instance's new resource levels", "retry and timeout logic. if \"get_instance_config\" not in self._inner_api_calls: self._inner_api_calls[", "import spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name", "the newly-requested level. Until completion of the returned operation: -", "If absent, all ``Instance`` fields are returned. 
If a dict", "Args: resource (str): REQUIRED: The resource for which the policy", "google.api_core.gapic_v1.method import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.operation import google.api_core.operations_v1 import", "the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]):", "= client.get_iam_policy(resource) Args: resource (str): REQUIRED: The resource for which", "transport method to add retry and timeout logic. if \"get_iam_policy\"", "instance_id (str): Required. The ID of the instance to create.", "(Union[~.InstanceAdminGrpcTransport, Callable[[~.Credentials, type], ~.InstanceAdminGrpcTransport]): A transport instance, responsible for actually", "metadata=None, ): \"\"\" Lists the supported instance configurations for a", "address=api_endpoint, ) else: if credentials: raise ValueError( \"Received both a", "def get_instance_config( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets", "``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 characters in", "self._inner_api_calls[\"list_instances\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instances\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", )", "the label \"env\" and the value of the label contains", "The instance has a name. 
- ``name:Howl`` --> The instance's", "streaming is performed per-page, this determines the maximum number of", "resources begins (some types may have lower than the requested", "the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "= iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) if metadata is None: metadata = []", "TODO: Initialize `instance`: >>> instance = {} >>> >>> #", "= spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]') >>> >>>", "The ``response`` field type is ``Instance``, if successful. Authorization requires", "value containing \"dev\". retry (Optional[google.api_core.retry.Retry]): A retry object used to", "types may have lower than the requested levels). - All", "to the constructor. kwargs: Additional arguments to pass to the", "used by Cloud Spanner databases. Each instance has a \"configuration\",", "channel: warnings.warn( \"The `channel` argument is deprecated; use \" \"`transport`", "all of the instance's reserved resources. Soon afterward: - The", "page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists all instances", "credentials identify this application to the service. If none are", "is mutually exclusive with providing a transport instance to ``transport``;", "= operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata.", "All other attempts to modify the instance are rejected. -", "is specified, the timeout applies to each individual attempt. metadata", "by the caller. If the named instance already exists, ``CreateInstance``", "= client.get_instance(name) Args: name (str): Required. 
The name of the", "name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Deletes an instance. Immediately", "track creation of the instance. The ``metadata`` field type is", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instances\"],", "the progress of updating the instance. If the named instance", "_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(\"google-cloud-spanner\").version class InstanceAdminClient(object): \"\"\" Cloud Spanner Instance Admin", "file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config[\"interfaces\"][self._INTERFACE_NAME] )", "undoing all resource changes, after which point it terminates with", ":class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances. You can also", "the return value. If page streaming is performed per-page, this", "AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation", "default_timeout=self._method_configs[\"ListInstances\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstancesRequest( parent=parent, page_size=page_size, filter=filter_ )", "for which the policy detail is being requested. See the", "``labels.env:dev`` --> The instance has the label \"env\" and the", "in ``field_mask`` need be included. If a dict is provided,", "a fully-qualified instance string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instances/{instance}\", project=project, instance=instance, )", "API requests. 
If ``None``, then default info will be used.", "= [(\"name\", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(", "so will raise an exception. client_config (dict): DEPRECATED. A dictionary", "using a default configuration. timeout (Optional[float]): The amount of time,", "spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, ) def list_instance_configs( self, parent, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> parent =", "with API requests. If ``None``, then default info will be", "google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs[\"DeleteInstance\"].retry, default_timeout=self._method_configs[\"DeleteInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name) if", "have a policy set. Authorization requires ``spanner.instances.getIamPolicy`` on ``resource``. Example:", "retry and timeout logic. if \"set_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[", "return google.api_core.path_template.expand( \"projects/{project}\", project=project ) def __init__( self, transport=None, channel=None,", "= iam_policy_pb2.GetIamPolicyRequest( resource=resource, options=options_ ) if metadata is None: metadata", "~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to create. 
The name may be", "project): \"\"\"Return a fully-qualified project string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}\", project=project", "Iterate over results one page at a time >>> for", "self.transport = transport( credentials=credentials, default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport, address=api_endpoint, ) else: if credentials:", "resource=resource, permissions=permissions ) if metadata is None: metadata = []", "the results of the request. Filter rules are case insensitive.", "be specified; this prevents any future fields in ``Instance`` from", "in the underlying API response. If page streaming is performed", ">>> >>> response = client.get_iam_policy(resource) Args: resource (str): REQUIRED: The", "containing Google Cloud Project. Otherwise returns an empty set of", "field. options_ (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying", "return cls(*args, **kwargs) from_service_account_json = from_service_account_file @classmethod def instance_path(cls, project,", "pkg_resources import warnings from google.oauth2 import service_account import google.api_core.client_options import", "can also iterate over the pages of the response using", "may have lower than the requested levels). - Databases can", "of the project in which to create the instance. Values", "google.api_core.exceptions.RetryError: If the request failed due to a retryable error", "allocating or releasing resources as requested. The returned ``long-running operation``", "in the client_config dictionary. _INTERFACE_NAME = \"google.spanner.admin.instance.v1.InstanceAdmin\" @classmethod def from_service_account_file(cls,", "been requested, billing is based on the newly-requested level. Until", "is ``CreateInstanceMetadata``. The ``response`` field type is ``Instance``, if successful.", "used. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a", "own client library. client_options (Union[dict, google.api_core.client_options.ClientOptions]): Client options used to", "), request=request, items_field=\"instances\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def get_instance(", "{} >>> >>> response = client.update_instance(instance, field_mask) >>> >>> def", "self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( client_config[\"interfaces\"][self._INTERFACE_NAME] ) # Save a dictionary of", "their sizes. After an instance exists, there are no additional", "google.api_core.path_template.expand( \"projects/{project}/instanceConfigs/{instance_config}\", project=project, instance_config=instance_config, ) @classmethod def project_path(cls, project): \"\"\"Return", ") else: if credentials: raise ValueError( \"Received both a transport", "are invalid. \"\"\" # Wrap the transport method to add", "the instance's allocation has been requested, billing is based on", "instance configurations for a given project. Example: >>> from google.cloud", "google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instance_configs\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instance_configs\",", "<https://cloud.google.com/iam/docs/overview#permissions>`__. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests.", "spanner_instance_admin_pb2.ListInstancesRequest( parent=parent, page_size=page_size, filter=filter_ ) if metadata is None: metadata", "`resource`: >>> resource = '' >>> >>> # TODO: Initialize", "for serving the instance's tables. - The instance's new resource", "offer isolation: problems with databases in one instance will not", "performed per- resource, this parameter does not affect the return", "or 'storage.*') are not allowed. 
For more information see `IAM", "else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"create_instance\"](", "permission on resource ``name``. Example: >>> from google.cloud import spanner_admin_instance_v1", "default_timeout=self._method_configs[\"TestIamPermissions\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions ) if", ">>> # Iterate over all results >>> for element in", "project=project, instance_config=instance_config, ) @classmethod def project_path(cls, project): \"\"\"Return a fully-qualified", "use this file except in compliance with the License. #", "the first argument and the default transport class as the", "credentials to attach to requests. These credentials identify this application", "in self._inner_api_calls: self._inner_api_calls[ \"get_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs[\"GetIamPolicy\"].retry, default_timeout=self._method_configs[\"GetIamPolicy\"].timeout,", "default_timeout=self._method_configs[\"GetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.GetIamPolicyRequest( resource=resource, options=options_ ) if", "import google.api_core.operation import google.api_core.operations_v1 import google.api_core.page_iterator import google.api_core.path_template import grpc", "of resources in a page. filter_ (str): An expression for", "if \"get_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"get_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method(", "successful. 
Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client", "dict is provided, it must be of the same form", "of the format ``<instance_name>/operations/<operation_id>`` and can be used to track", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instance_configs, default_retry=self._method_configs[\"ListInstanceConfigs\"].retry, default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout, client_info=self._client_info, ) request =", "filtering the results of the request. Filter rules are case", "get_instance( self, name, field_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets", "to add retry and timeout logic. if \"set_iam_policy\" not in", "are readable via the API. - The instance's state becomes", "of the instance. The ``metadata`` field type is ``CreateInstanceMetadata``. The", ") metadata.append(routing_metadata) operation = self._inner_api_calls[\"update_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata )", "default transport class as the second argument. channel (grpc.Channel): DEPRECATED.", "message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask specifying which", "already exists, ``CreateInstance`` returns ``ALREADY_EXISTS``. Immediately upon completion of this", "in client.list_instance_configs(parent).pages: ... for element in page: ... # process", "The name of the requested instance configuration. Values are of", "is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance. Raises:", "a valid policy but certain Cloud Platform services (such as", "in that instance, and their performance may suffer. 
\"\"\" SERVICE_ADDRESS", ">>> >>> response = client.test_iam_permissions(resource, permissions) Args: resource (str): REQUIRED:", "] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs[\"SetIamPolicy\"].retry, default_timeout=self._method_configs[\"SetIamPolicy\"].timeout, client_info=self._client_info, ) request =", ") if client_options.api_endpoint: api_endpoint = client_options.api_endpoint # Instantiate the transport.", "logic. if \"update_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"update_instance\" ] =", "\"\"\" credentials = service_account.Credentials.from_service_account_file(filename) kwargs[\"credentials\"] = credentials return cls(*args, **kwargs)", "options_pb2 from google.iam.v1 import policy_pb2 from google.longrunning import operations_pb2 from", "for the ``resource``. Permissions with wildcards (such as '*' or", "from_service_account_file(cls, filename, *args, **kwargs): \"\"\"Creates an instance of this client", "need to set this if you're developing your own client", "method. Returns: A :class:`~google.api_core.operation.Operation` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request", "``labels.key`` where key is the name of a label Some", "return iterator def get_instance( self, name, field_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "more information see `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. retry (Optional[google.api_core.retry.Retry]): A retry", "name = client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]') >>> >>> response = client.get_instance_config(name) Args:", "of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances. 
You can also iterate over the pages", "= client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> response = client.get_instance(name) Args: name", "in compliance with the License. # You may obtain a", "to pass to the constructor. kwargs: Additional arguments to pass", "in ``Instance`` should be updated. The field mask must always", "import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.operation import google.api_core.operations_v1 import google.api_core.page_iterator", "from google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(\"google-cloud-spanner\").version class InstanceAdminClient(object): \"\"\"", "want to go away. if client_config is not None: warnings.warn(", "software # distributed under the License is distributed on an", "\"\"\" Deletes an instance. Immediately upon completion of the request:", ">>> >>> >>> # Alternatively: >>> >>> # Iterate over", "databases can affect each other. For example, if one database", "few 10s of KB. An empty policy is a valid", "retry and timeout logic. if \"create_instance\" not in self._inner_api_calls: self._inner_api_calls[", "The resource for which the policy is being specified. See", "resources are available for other databases in that instance, and", "other. For example, if one database in an instance receives", ">>> response = client.get_instance(name) Args: name (str): Required. The name", ") else: client_config = instance_admin_client_config.config if channel: warnings.warn( \"The `channel`", "not affect the return value. If page streaming is performed", "= transport else: self.transport = instance_admin_grpc_transport.InstanceAdminGrpcTransport( address=api_endpoint, channel=channel, credentials=credentials )", "over all results >>> for element in client.list_instance_configs(parent): ... #", "the response using its `pages` property. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the", "using the provided credentials file. Args: filename (str): The path", "requested attributes but no allocated resources. Its state is ``CREATING``.", "their pre-request values. The operation is guaranteed to succeed at", "fewer resources are available for other databases in that instance,", "credentials from the environment. This argument is mutually exclusive with", "policy set. Authorization requires ``spanner.instances.getIamPolicy`` on ``resource``. Example: >>> from", "requires ``spanner.instances.update`` permission on resource ``name``. Example: >>> from google.cloud", "if metadata is None: metadata = [] metadata = list(metadata)", "``metadata`` field type is ``UpdateInstanceMetadata``. The ``response`` field type is", "attempts to modify the instance are rejected. Upon completion of", "the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]):", "google.api_core.client_options.ClientOptions]): Client options used to set user options on the", "If none are specified, the client will attempt to ascertain", "metadata is None: metadata = [] metadata = list(metadata) try:", "absent, all ``Instance`` fields are returned. If a dict is", "\"get_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs[\"GetInstance\"].retry, default_timeout=self._method_configs[\"GetInstance\"].timeout, client_info=self._client_info, ) request", "``response`` field type is ``Instance``, if successful. Example: >>> from", "metadata = [] metadata = list(metadata) try: routing_header = [(\"name\",", "The authorization credentials to attach to requests. These credentials identify", "The ``response`` field type is ``Instance``, if successful. Example: >>>", "instance. Callables will be sent the credentials as the first", "and their performance may suffer. 
\"\"\" SERVICE_ADDRESS = \"spanner.googleapis.com:443\" \"\"\"The", "# # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "Initialize `permissions`: >>> permissions = [] >>> >>> response =", "instance has a name. - ``name:Howl`` --> The instance's name", "instance has a \"configuration\", which dictates where the serving resources", "\"\"\" # Wrap the transport method to add retry and", "resources. Its state is ``CREATING``. Until completion of the returned", "for handling serialization and # deserialization and actually sending data", "the instance to create. Valid identifiers are of the form", "the operation sets its metadata's ``cancel_time``, and begins restoring resources", "`instance_id`: >>> instance_id = '' >>> >>> # TODO: Initialize", "metadata = [] metadata = list(metadata) try: routing_header = [(\"instance.name\",", "provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed", "can be created in the instance. - The instance's allocated", "form ``projects/<project>/instanceConfigs/<config>``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry", "information about a particular instance. Example: >>> from google.cloud import", "Gets information about a particular instance configuration. Example: >>> from", "serving and storage resources to be used by Cloud Spanner", "routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance_config\"]( request, retry=retry,", "to track the progress of preparing the new instance. The", "``projects/<project>``. instance_id (str): Required. The ID of the instance to", "pass to the constructor. kwargs: Additional arguments to pass to", "self.SERVICE_ADDRESS if client_options: if type(client_options) == dict: client_options = google.api_core.client_options.from_dict(", "the requested instance. 
Values are of the form ``projects/<project>/instances/<instance>``. field_mask", "can be used to track the instance modification. The ``metadata``", "form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required.", "along with API requests. If ``None``, then default info will", "\"list_instances\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instances, default_retry=self._method_configs[\"ListInstances\"].retry, default_timeout=self._method_configs[\"ListInstances\"].timeout, client_info=self._client_info, ) request", "Initialize `instance`: >>> instance = {} >>> >>> response =", "spanner_instance_admin_pb2_grpc from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import options_pb2 from", "on a non-existent Cloud Spanner instance resource will result in", "(str): Required. The name of the project for which a", "parent (str): Required. The name of the project in which", "**kwargs) from_service_account_json = from_service_account_file @classmethod def instance_path(cls, project, instance): \"\"\"Return", "if successful. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>>", "completion of this request: - For resource types for which", "If the request failed for any reason. google.api_core.exceptions.RetryError: If the", "method to add retry and timeout logic. if \"get_iam_policy\" not", "with the License. # You may obtain a copy of", "operation: - Cancelling the operation sets its metadata's ``cancel_time``, and", "be between 2 and 64 characters in length. instance (Union[dict,", "--> The instance's name contains the string \"howl\". - ``name:HOWL``", "the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used", "(dict): DEPRECATED. A dictionary of call options for each method.", "the instance. 
Values are of the form ``projects/<project>``. instance_id (str):", "over the pages of the response using its `pages` property.", "LLC # # Licensed under the Apache License, Version 2.0", "the request. Filter rules are case insensitive. The fields eligible", "\"\"\" Gets the access control policy for an instance resource.", "are available for serving the instance's tables. - The instance's", "PendingDeprecationWarning, stacklevel=2, ) else: client_config = instance_admin_client_config.config if channel: warnings.warn(", "self._inner_api_calls: self._inner_api_calls[ \"create_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance, default_retry=self._method_configs[\"CreateInstance\"].retry, default_timeout=self._method_configs[\"CreateInstance\"].timeout, client_info=self._client_info,", "= client.project_path('[PROJECT]') >>> >>> # Iterate over all results >>>", "requires ``spanner.instances.setIamPolicy`` on ``resource``. Example: >>> from google.cloud import spanner_admin_instance_v1", "access control policy for an instance resource. Returns an empty", "response.metadata() Args: parent (str): Required. The name of the project", "provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance. Raises: google.api_core.exceptions.GoogleAPICallError:", "The path to the service account private key json file.", "on the client. 
API Endpoint should be set through client_options.", "this request: - For resource types for which a decrease", "self.transport.list_instance_configs, default_retry=self._method_configs[\"ListInstanceConfigs\"].retry, default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstanceConfigsRequest( parent=parent, page_size=page_size", "retry=retry, timeout=timeout, metadata=metadata ) def delete_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "iterator def get_instance_config( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "decrease in the instance's allocation has been requested, billing is", "exists, ``CreateInstance`` returns ``ALREADY_EXISTS``. Immediately upon completion of this request:", "name=name, field_mask=field_mask ) if metadata is None: metadata = []", "return self._inner_api_calls[\"set_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def get_iam_policy( self,", "to set user options on the client. API Endpoint should", "property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason.", "import iam_policy_pb2 from google.iam.v1 import options_pb2 from google.iam.v1 import policy_pb2", "permissions=permissions ) if metadata is None: metadata = [] metadata", "\"spanner.googleapis.com:443\" \"\"\"The default address of the service.\"\"\" # The name", "express or implied. # See the License for the specific", "is ``UpdateInstanceMetadata``. The ``response`` field type is ``Instance``, if successful.", "Replaces any existing policy. Authorization requires ``spanner.instances.setIamPolicy`` on ``resource``. Example:", "except in compliance with the License. 
# You may obtain", "Until completion of the returned operation: - Cancelling the operation", "the user has ``spanner.instances.list`` permission on the containing Google Cloud", "= response.metadata() Args: parent (str): Required. The name of the", "add retry and timeout logic. if \"test_iam_permissions\" not in self._inner_api_calls:", "API calls. The default transport uses the gRPC protocol. This", "import google.api_core.page_iterator import google.api_core.path_template import grpc from google.cloud.spanner_admin_instance_v1.gapic import enums", "spanner_instance_admin_pb2 from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc from google.iam.v1 import iam_policy_pb2 from", "name of the requested instance. Values are of the form", "providing a transport instance to ``transport``; doing so will raise", "default address of the service.\"\"\" # The name of the", "instance and *all of its databases* immediately and irrevocably disappear", "spanner_instance_admin_pb2.CreateInstanceRequest( parent=parent, instance_id=instance_id, instance=instance ) if metadata is None: metadata", "continues to give the pre-request resource levels. Upon completion of", "accidentally by clients that do not know about them. If", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "method=functools.partial( self._inner_api_calls[\"list_instance_configs\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instance_configs\", request_token_field=\"page_token\", response_token_field=\"next_page_token\",", "to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for", "and irrevocably disappear from the API. 
All data in the", "client info used to send a user-agent string along with", "is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( gapic_version=_GAPIC_LIBRARY_VERSION ) else: client_info.gapic_version =", "However, within an instance databases can affect each other. For", "of the returned operation: - Cancelling the operation sets its", ">>> for element in client.list_instance_configs(parent): ... # process element ...", "number of resources contained in the underlying API response. If", "these are mutually exclusive.\" ) self.transport = transport else: self.transport", "other instances. However, within an instance databases can affect each", "iterate over the pages of the response using its `pages`", "and timeout logic. if \"list_instance_configs\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instance_configs\"", "labels.env:dev`` --> The instance's name contains \"howl\" and it has", "if one database in an instance receives a lot of", "its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for", "default settings for retry and timeout for each RPC #", "metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata, ) def update_instance( self, instance, field_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Lists all instances in", "callable which returns a transport instance. Callables will be sent", "instance. Immediately upon completion of the request: - Billing ceases", "as the second argument. channel (grpc.Channel): DEPRECATED. A ``Channel`` instance", "the instance are rejected. - Reading the instance via the", "If the named instance already exists, ``CreateInstance`` returns ``ALREADY_EXISTS``. Immediately", "to track creation of the instance. 
The ``metadata`` field type", "CONDITIONS OF ANY KIND, either express or implied. # See", "attempts to modify the instance are rejected. - Reading the", "reject them. If a dict is provided, it must be", "and their sizes. After an instance exists, there are no", "identify this application to the service. If none are specified,", "64 characters in length. instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance", "service account private key json file. args: Additional arguments to", "to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If", "the serving resources for the Cloud Spanner instance are located", "network bandwidth charges). Instances offer isolation: problems with databases in", "configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to", "requested. Values are of the form ``projects/<project>``. page_size (int): The", "str]]]): Additional metadata that is provided to the method. Returns:", "values. The operation is guaranteed to succeed at undoing all", "returned operation: - Billing for all successfully-allocated resources begins (some", "return self._inner_api_calls[\"get_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def test_iam_permissions( self,", "to be applied to the ``resource``. The size of the", "for which a list of supported instance configurations is requested.", "_GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out the default settings", "routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance_config\"]( request, retry=retry, timeout=timeout, metadata=metadata )", "A ``GetPolicyOptions`` object for specifying options to ``GetIamPolicy``. This field", ":class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any", "= list(metadata) try: routing_header = [(\"resource\", resource)] except AttributeError: pass", "retryable error and retry attempts failed. ValueError: If the parameters", "creation of the instance. The ``metadata`` field type is ``CreateInstanceMetadata``.", "will attempt to ascertain the credentials from the environment. This", "the service. If none are specified, the client will attempt", "the transport method to add retry and timeout logic. if", "uses the gRPC protocol. This argument may also be a", "file. Args: filename (str): The path to the service account", "an instance, and begins allocating or releasing resources as requested.", "all resource changes, after which point it terminates with a", ">>> # TODO: Initialize `field_mask`: >>> field_mask = {} >>>", "be included. If a dict is provided, it must be", "spanner_instance_admin_pb2.DeleteInstanceRequest(name=name) if metadata is None: metadata = [] metadata =", "of using filters are: - ``name:*`` --> The instance has", "instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances. You can also iterate", "other databases in that instance, and their performance may suffer.", "Initialize `instance_id`: >>> instance_id = '' >>> >>> # TODO:", "metadata that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy`", "modify and list instances. Instances are dedicated Cloud Spanner serving", "= client.test_iam_permissions(resource, permissions) Args: resource (str): REQUIRED: The resource for", "the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used", "in seconds, to wait for the request to complete. 
Note", "google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.gapic_v1.routing_header import", "API, with all requested attributes but no allocated resources. Its", "policy for an instance resource. Returns an empty policy if", "argument. channel (grpc.Channel): DEPRECATED. A ``Channel`` instance through which to", "credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( gapic_version=_GAPIC_LIBRARY_VERSION", "metadata=metadata ) def set_iam_policy( self, resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask specifying", "= google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"update_instance\"]( request, retry=retry,", "warnings.warn( \"The `channel` argument is deprecated; use \" \"`transport` instead.\",", ":class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any", "number of resources in a page. filter_ (str): An expression", "actually sending data to the service. if transport: if callable(transport):", "(Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method.", "it has the label \"env\" with its value containing \"dev\".", "fields that should be returned. If absent, all ``Instance`` fields", "metadata = [] metadata = list(metadata) try: routing_header = [(\"resource\",", "instance resource. Replaces any existing policy. 
Authorization requires ``spanner.instances.setIamPolicy`` on", "Args: parent (str): Required. The name of the project for", "name of the project for which a list of supported", ">>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> parent = client.project_path('[PROJECT]')", "warnings.warn( \"The `client_config` argument is deprecated.\", PendingDeprecationWarning, stacklevel=2, ) else:", "timeout logic. if \"test_iam_permissions\" not in self._inner_api_calls: self._inner_api_calls[ \"test_iam_permissions\" ]", "# TODO: Initialize `policy`: >>> policy = {} >>> >>>", "with all requested attributes but no allocated resources. Its state", "client_options = google.api_core.client_options.from_dict( client_options ) if client_options.api_endpoint: api_endpoint = client_options.api_endpoint", "be omitted, but if specified must be ``<parent>/instances/<instance_id>``. If a", "using filters are: - ``name:*`` --> The instance has a", "newly-requested level. Until completion of the returned operation: - Cancelling", "google.iam.v1 import options_pb2 from google.iam.v1 import policy_pb2 from google.longrunning import", "gRPC protocol. This argument may also be a callable which", "consumes most of the instance resources, fewer resources are available", "method. If not specified, the default configuration is used. client_info", "is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance. Raises:", "response.metadata() Args: instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to update,", "--> The instance has the label \"env\". 
- ``labels.env:dev`` -->", "routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls[\"create_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata", "ValueError( \"Received both a transport instance and \" \"credentials; these", ") else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse", "the name of a label Some examples of using filters", "charges). Instances offer isolation: problems with databases in one instance", "The name of the instance to be deleted. Values are", "private key json file. args: Additional arguments to pass to", "track the progress of updating the instance. If the named", "Lists the supported instance configurations for a given project. Example:", "permissions and # limitations under the License. \"\"\"Accesses the google.spanner.admin.instance.v1", "returned. If a dict is provided, it must be of", "default_timeout=self._method_configs[\"DeleteInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name) if metadata is None:", "= google.api_core.gapic_v1.client_info.ClientInfo( gapic_version=_GAPIC_LIBRARY_VERSION ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info =", "billing is based on the newly-requested level. Until completion of", "completion of the returned operation: - Cancelling the operation renders", "client_info=self._client_info, ) request = spanner_instance_admin_pb2.CreateInstanceRequest( parent=parent, instance_id=instance_id, instance=instance ) if", "is the name of a label Some examples of using", "= google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"test_iam_permissions\"]( request, retry=retry, timeout=timeout,", "is requested. Values are of the form ``projects/<project>``. page_size (int):", "over all results >>> for element in client.list_instances(parent): ... 
#", "label \"env\" with its value containing \"dev\". retry (Optional[google.api_core.retry.Retry]): A", "= google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs[\"GetInstance\"].retry, default_timeout=self._method_configs[\"GetInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceRequest(", "provided to the method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. An iterable", "cached API call functions. # These are the actual callables", "upon completion of this request: - For resource types for", "that is provided to the method. Returns: A :class:`~google.api_core.operation.Operation` instance.", "the named instance already exists, ``CreateInstance`` returns ``ALREADY_EXISTS``. Immediately upon", ">>> >>> # Iterate over results one page at a", "instance are located (e.g., US-central, Europe). Configurations are created by", "import spanner_instance_admin_pb2_grpc from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import options_pb2", "client_config dictionary. _INTERFACE_NAME = \"google.spanner.admin.instance.v1.InstanceAdmin\" @classmethod def from_service_account_file(cls, filename, *args,", "Save a dictionary of cached API call functions. # These", "in which to create the instance. Values are of the", "if credentials: raise ValueError( \"Received both a transport instance and", "send a user-agent string along with API requests. If ``None``,", "request = spanner_instance_admin_pb2.ListInstanceConfigsRequest( parent=parent, page_size=page_size ) if metadata is None:", "see `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. retry (Optional[google.api_core.retry.Retry]): A retry object used", "of this request: - For resource types for which a", "metadata=None, ): \"\"\" Gets the access control policy for an", "): \"\"\" Lists all instances in the given project. 
Example:", "\"set_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"set_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy,", "If page streaming is performed per-page, this determines the maximum", "Args: name (str): Required. The name of the requested instance", "with a ``CANCELLED`` status. - All other attempts to modify", "of the project for which a list of supported instance", "client.list_instances(parent): ... # process element ... pass >>> >>> >>>", "import google.api_core.grpc_helpers import google.api_core.operation import google.api_core.operations_v1 import google.api_core.page_iterator import google.api_core.path_template", "client will attempt to ascertain the credentials from the environment.", "to create, delete, modify and list instances. Instances are dedicated", "google.api_core.gapic_v1.method.wrap_method( self.transport.list_instances, default_retry=self._method_configs[\"ListInstances\"].retry, default_timeout=self._method_configs[\"ListInstances\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstancesRequest( parent=parent,", "Cloud Spanner serving and storage resources to be used by", "# timeout, and the like. self._inner_api_calls = {} # Service", "has the label \"env\" and the value of the label", "deprecated.\", PendingDeprecationWarning, stacklevel=2, ) else: client_config = instance_admin_client_config.config if channel:", "instance, field_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Updates an instance,", "`permissions`: >>> permissions = [] >>> >>> response = client.test_iam_permissions(resource,", "may be omitted, but if specified must be ``<parent>/instances/<instance_id>``. If", "responsible for actually making the API calls. 
The default transport", "self.transport.get_instance_config, default_retry=self._method_configs[\"GetInstanceConfig\"].retry, default_timeout=self._method_configs[\"GetInstanceConfig\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name) if metadata", "instance via the API continues to give the pre-request resource", "= spanner_admin_instance_v1.InstanceAdminClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> #", "This argument is mutually exclusive with providing a transport instance", "fields eligible for filtering are: - ``name`` - ``display_name`` -", "The instance can be deleted. - All other attempts to", "get_instance_config( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets information", "pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls[\"delete_instance\"]( request,", "a particular instance configuration. Example: >>> from google.cloud import spanner_admin_instance_v1", "the default settings for retry and timeout for each RPC", "request = iam_policy_pb2.GetIamPolicyRequest( resource=resource, options=options_ ) if metadata is None:", "resources, fewer resources are available for other databases in that", "instance. Values are of the form ``projects/<project>/instances/<instance>``. field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]):", "'storage.*') are not allowed. For more information see `IAM Overview", "the `*_config.py` # file next to this one.) 
self._method_configs =", "- All other attempts to modify the instance are rejected.", "return self._inner_api_calls[\"get_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def delete_instance( self,", "spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> response", "If a dict is provided, it must be of the", "retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_instance_admin_pb2.Instance, metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata,", "process element ... pass >>> >>> >>> # Alternatively: >>>", "also be a callable which returns a transport instance. Callables", "utf-8 -*- # # Copyright 2020 Google LLC # #", "Copyright 2020 Google LLC # # Licensed under the Apache", "user has ``spanner.instances.list`` permission on the containing Google Cloud Project.", "service. If none are specified, the client will attempt to", "above. - ``labels.env:*`` --> The instance has the label \"env\".", "Sets the access control policy on an instance resource. Replaces", "used by Cloud IAM. If a dict is provided, it", "which to make calls. This argument is mutually exclusive with", "named instance does not exist, returns ``NOT_FOUND``. Immediately upon completion", "Europe). Configurations are created by Google based on resource availability.", "Spanner Instance Admin API The Cloud Spanner Instance Admin API", "it must be of the same form as the protobuf", "= '' >>> >>> response = client.get_iam_policy(resource) Args: resource (str):", "``ALREADY_EXISTS``. Immediately upon completion of this request: - The instance", "a transport instance. Callables will be sent the credentials as", "irrevocably disappear from the API. 
All data in the databases", "stacklevel=2, ) else: client_config = instance_admin_client_config.config if channel: warnings.warn( \"The", "\"get_iam_policy\" not in self._inner_api_calls: self._inner_api_calls[ \"get_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy,", "the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]):", "for retry and timeout for each RPC # from the", "raise ValueError( \"Received both a transport instance and \" \"credentials;", "spanner_admin_instance_v1 >>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name =", "which dictates where the serving resources for the Cloud Spanner", "of the returned operation: - Cancelling the operation renders the", "allocation has been requested, billing is based on the newly-requested", "google.cloud.spanner_admin_instance_v1.gapic import enums from google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config from google.cloud.spanner_admin_instance_v1.gapic.transports import", "is ``Instance``, if successful. Example: >>> from google.cloud import spanner_admin_instance_v1", "is being specified. See the operation documentation for the appropriate", ">>> >>> name = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> response =", "The size of the policy is limited to a few", "and timeout logic. 
if \"list_instances\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instances\"", "from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 from google.protobuf", "The instance is readable via the API, with all requested", "self._inner_api_calls[ \"get_iam_policy\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs[\"GetIamPolicy\"].retry, default_timeout=self._method_configs[\"GetIamPolicy\"].timeout, client_info=self._client_info, )", "self._inner_api_calls: self._inner_api_calls[ \"get_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs[\"GetInstance\"].retry, default_timeout=self._method_configs[\"GetInstance\"].timeout, client_info=self._client_info,", "REQUIRED: The resource for which the policy is being requested.", "data in the databases is permanently deleted. Example: >>> from", "if \"create_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"create_instance\" ] = google.api_core.gapic_v1.method.wrap_method(", "Cloud Spanner instance resource will result in a NOT_FOUND error", "example, if one database in an instance receives a lot", "a NOT_FOUND error if the user has ``spanner.instances.list`` permission on", "restoring resources to their pre-request values. The operation is guaranteed", "of the form ``projects/<project>``. page_size (int): The maximum number of", "spanner_instance_admin_pb2.UpdateInstanceRequest( instance=instance, field_mask=field_mask ) if metadata is None: metadata =", "a list of instances is requested. Values are of the", "the interface for this client. 
This is the key used", "~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If field_mask is present, specifies the subset of ``Instance``", "project_path(cls, project): \"\"\"Return a fully-qualified project string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}\",", "resource (str): REQUIRED: The resource for which the policy is", "modification. The ``metadata`` field type is ``UpdateInstanceMetadata``. The ``response`` field", "Required. A mask specifying which fields in ``Instance`` should be", "retry and timeout logic. if \"update_instance\" not in self._inner_api_calls: self._inner_api_calls[", "form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry", "of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` retry", "# Iterate over all results >>> for element in client.list_instances(parent):", "\"delete_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs[\"DeleteInstance\"].retry, default_timeout=self._method_configs[\"DeleteInstance\"].timeout, client_info=self._client_info, ) request", "``field_mask`` need be included. If a dict is provided, it", "\"list_instances\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instances\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_instances,", "renders the instance immediately unreadable via the API. - The", "resources to be used by Cloud Spanner databases. Each instance", "client=None, method=functools.partial( self._inner_api_calls[\"list_instance_configs\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instance_configs\", request_token_field=\"page_token\",", "(str): Required. The name of the instance to be deleted.", "requested. 
See the operation documentation for the appropriate value for", "the access control policy on an instance resource. Replaces any", "import functools import pkg_resources import warnings from google.oauth2 import service_account", "dictionary of cached API call functions. # These are the", "Immediately upon completion of this request: - For resource types", "between 2 and 64 characters in length. instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]):", "available for other databases in that instance, and their performance", "'' >>> >>> # TODO: Initialize `instance`: >>> instance =", "metadata. >>> metadata = response.metadata() Args: parent (str): Required. The", "the service. if transport: if callable(transport): self.transport = transport( credentials=credentials,", "default_retry=self._method_configs[\"ListInstanceConfigs\"].retry, default_timeout=self._method_configs[\"ListInstanceConfigs\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstanceConfigsRequest( parent=parent, page_size=page_size )", "in the databases is permanently deleted. Example: >>> from google.cloud", "specifying options to ``GetIamPolicy``. This field is only used by", "calls. This argument is mutually exclusive with ``credentials``; providing both", "= google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial(", "the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the", "form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry", "method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request", "items_field=\"instances\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator def get_instance( self, name,", "information see `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. retry (Optional[google.api_core.retry.Retry]): A retry object", "for the Cloud Spanner instance are located (e.g., US-central, Europe).", "field is only used by Cloud IAM. If a dict", "but if specified must be ``<parent>/instances/<instance_id>``. If a dict is", "def test_iam_permissions( self, resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests.", "over results one page at a time >>> for page", "self._inner_api_calls[\"get_iam_policy\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def test_iam_permissions( self, resource,", "Args: transport (Union[~.InstanceAdminGrpcTransport, Callable[[~.Credentials, type], ~.InstanceAdminGrpcTransport]): A transport instance, responsible", "# limitations under the License. \"\"\"Accesses the google.spanner.admin.instance.v1 InstanceAdmin API.\"\"\"", "client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out the", "be a callable which returns a transport instance. Callables will", "import operations_pb2 from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2", "timeout logic. if \"list_instance_configs\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instance_configs\" ]", "guaranteed to succeed at undoing all resource changes, after which", "case insensitive. 
The fields eligible for filtering are: - ``name``", "per-operation charges for use of the instance (though there may", "Otherwise, only fields mentioned in ``field_mask`` need be included. If", "resources contained in the underlying API response. If page streaming", "of time, in seconds, to wait for the request to", "): \"\"\" Gets information about a particular instance. Example: >>>", "affect the return value. If page streaming is performed per-page,", "self.transport.delete_instance, default_retry=self._method_configs[\"DeleteInstance\"].retry, default_timeout=self._method_configs[\"DeleteInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name) if metadata", "maximum number of resources contained in the underlying API response.", "AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator", "Note that if ``retry`` is specified, the timeout applies to", "metadata that is provided to the method. Returns: A :class:`~google.api_core.operation.Operation`", "import google.api_core.operations_v1 import google.api_core.page_iterator import google.api_core.path_template import grpc from google.cloud.spanner_admin_instance_v1.gapic", "access control policy on an instance resource. Replaces any existing", ">>> resource = '' >>> >>> response = client.get_iam_policy(resource) Args:", "client_info=self._client_info, ) request = iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions ) if metadata", "Version 2.0 (the \"License\"); # you may not use this", ">>> >>> # Handle metadata. >>> metadata = response.metadata() Args:", "instance, and begins allocating or releasing resources as requested. 
The", "client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name) if metadata is None: metadata", "A ``Channel`` instance through which to make calls. This argument", "using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed", "retry and timeout for each RPC # from the client", ">>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata", "parent=parent, page_size=page_size, filter=filter_ ) if metadata is None: metadata =", "the ``resource``. Permissions with wildcards (such as '*' or 'storage.*')", "google.longrunning import operations_pb2 from google.protobuf import empty_pb2 from google.protobuf import", "If ``None``, then default info will be used. Generally, you", "Updates an instance, and begins allocating or releasing resources as", "def get_iam_policy( self, resource, options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "and # deserialization and actually sending data to the service.", "policy is being requested. See the operation documentation for the", "method to add retry and timeout logic. if \"get_instance_config\" not", ">>> name = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> response = client.get_instance(name)", "the instance are rejected. Upon completion of the returned operation:", "= google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance, default_retry=self._method_configs[\"CreateInstance\"].retry, default_timeout=self._method_configs[\"CreateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.CreateInstanceRequest(", "that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance.", "a callable which returns a transport instance. 
Callables will be", ">>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_path('[PROJECT]',", "if ``retry`` is specified, the timeout applies to each individual", "\"\"\"Creates an instance of this client using the provided credentials", "contains \"howl\" and it has the label \"env\" with its", "and list instances. Instances are dedicated Cloud Spanner serving and", "an instance resource. Returns an empty policy if an instance", "= google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance\"]( request, retry=retry, timeout=timeout,", "instance can be deleted. - All other attempts to modify", "routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"test_iam_permissions\"]( request, retry=retry, timeout=timeout, metadata=metadata )", "this determines the maximum number of resources in a page.", "the instance to be deleted. Values are of the form", "filename (str): The path to the service account private key", "by applicable law or agreed to in writing, software #", "If the named instance does not exist, returns ``NOT_FOUND``. Immediately", "via the API, with all requested attributes but no allocated", "requested levels). - Databases can be created in the instance.", "def callback(operation_future): ... # Handle result. ... result = operation_future.result()", "The instance and *all of its databases* immediately and irrevocably", "spanner_admin_instance_v1.InstanceAdminClient() >>> >>> # TODO: Initialize `instance`: >>> instance =", "google.iam.v1 import iam_policy_pb2 from google.iam.v1 import options_pb2 from google.iam.v1 import", "permissions to check for the ``resource``. Permissions with wildcards (such", "serving. 
The returned ``long-running operation`` can be used to track", "def update_instance( self, instance, field_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "are of the form ``projects/<project>``. page_size (int): The maximum number", "transport instance. Callables will be sent the credentials as the", "(Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to", "is a valid policy but certain Cloud Platform services (such", "to modify the instance are rejected. Upon completion of the", "default_timeout=self._method_configs[\"CreateInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.CreateInstanceRequest( parent=parent, instance_id=instance_id, instance=instance )", "method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances.", "name may be omitted, but if specified must be ``<parent>/instances/<instance_id>``.", "Required. The name of the requested instance. Values are of", "for other databases in that instance, and their performance may", "to attach to requests. These credentials identify this application to", "none are specified, the client will attempt to ascertain the", "api_endpoint = self.SERVICE_ADDRESS if client_options: if type(client_options) == dict: client_options", "'[INSTANCE]') >>> >>> client.delete_instance(name) Args: name (str): Required. 
The name", "self.transport.test_iam_permissions, default_retry=self._method_configs[\"TestIamPermissions\"].retry, default_timeout=self._method_configs[\"TestIamPermissions\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions", "name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header )", "(str): Required. The name of the requested instance configuration. Values", "amount of time, in seconds, to wait for the request", "based on the instances that exist and their sizes. After", "The constructed client. \"\"\" credentials = service_account.Credentials.from_service_account_file(filename) kwargs[\"credentials\"] = credentials", "for the appropriate value for this field. options_ (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]):", "all results >>> for element in client.list_instance_configs(parent): ... # process", "self._inner_api_calls[ \"update_instance\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs[\"UpdateInstance\"].retry, default_timeout=self._method_configs[\"UpdateInstance\"].timeout, client_info=self._client_info, )", "and timeout logic. if \"delete_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"delete_instance\"", "parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header )", "update_instance( self, instance, field_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Updates", "services (such as Projects) might reject them. 
If a dict", "timeout=timeout, metadata=metadata, ), request=request, items_field=\"instances\", request_token_field=\"page_token\", response_token_field=\"next_page_token\", ) return iterator", "retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Updates an instance, and begins", "\"\"\"Return a fully-qualified project string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}\", project=project )", "all successfully-allocated resources begins (some types may have lower than", "- Cancelling the operation renders the instance immediately unreadable via", "the subset of ``Instance`` fields that should be returned. If", "spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]') >>> >>> response", "@classmethod def instance_path(cls, project, instance): \"\"\"Return a fully-qualified instance string.\"\"\"", "A retry object used to retry requests. If ``None`` is", "~.InstanceAdminGrpcTransport]): A transport instance, responsible for actually making the API", "metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the", "Cloud Platform services (such as Projects) might reject them. If", "instance are rejected. - Reading the instance via the API", "API. The returned ``long-running operation`` will have a name of", "may have lower than the requested levels). - All newly-reserved", "instance to create. Valid identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]``", "page. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests.", "with its value containing \"dev\". 
retry (Optional[google.api_core.retry.Retry]): A retry object", "{} >>> >>> response = client.set_iam_policy(resource, policy) Args: resource (str):", ">>> >>> response = client.create_instance(parent, instance_id, instance) >>> >>> def", "client_info=self._client_info, ) request = spanner_instance_admin_pb2.ListInstanceConfigsRequest( parent=parent, page_size=page_size ) if metadata", "If not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo):", "track the instance modification. The ``metadata`` field type is ``UpdateInstanceMetadata``.", "present, specifies the subset of ``Instance`` fields that should be", "applicable law or agreed to in writing, software # distributed", "to above. - ``NAME:howl`` --> Equivalent to above. - ``labels.env:*``", "string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}\", project=project ) def __init__( self, transport=None,", "google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instance_configs\"],", "The field mask must always be specified; this prevents any", "options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets the access control", "metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata, ) def list_instance_configs( self, parent, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None,", "transport( credentials=credentials, default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport, address=api_endpoint, ) else: if credentials: raise ValueError(", "where the serving resources for the Cloud Spanner instance are", "configuration in the client_config dictionary. 
_INTERFACE_NAME = \"google.spanner.admin.instance.v1.InstanceAdmin\" @classmethod def", "\"projects/{project}/instanceConfigs/{instance_config}\", project=project, instance_config=instance_config, ) @classmethod def project_path(cls, project): \"\"\"Return a", "to be used by Cloud Spanner databases. Each instance has", "if \"get_instance_config\" not in self._inner_api_calls: self._inner_api_calls[ \"get_instance_config\" ] = google.api_core.gapic_v1.method.wrap_method(", "exclusive.\" ) self.transport = transport else: self.transport = instance_admin_grpc_transport.InstanceAdminGrpcTransport( address=api_endpoint,", ">>> # TODO: Initialize `policy`: >>> policy = {} >>>", "json file. args: Additional arguments to pass to the constructor.", ") metadata.append(routing_metadata) operation = self._inner_api_calls[\"create_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata )", "default_retry=self._method_configs[\"GetInstance\"].retry, default_timeout=self._method_configs[\"GetInstance\"].timeout, client_info=self._client_info, ) request = spanner_instance_admin_pb2.GetInstanceRequest( name=name, field_mask=field_mask )", "request: - The instance is readable via the API, with", "valid policy but certain Cloud Platform services (such as Projects)", "set user options on the client. API Endpoint should be", "does not affect the return value. If page streaming is", "the environment. This argument is mutually exclusive with providing a", "``resource``. The size of the policy is limited to a", "to add retry and timeout logic. if \"list_instance_configs\" not in", "the instance modification. The ``metadata`` field type is ``UpdateInstanceMetadata``. The", "to add retry and timeout logic. 
if \"get_instance\" not in", "has a \"configuration\", which dictates where the serving resources for", "else: if credentials: raise ValueError( \"Received both a transport instance", "stacklevel=2, ) api_endpoint = self.SERVICE_ADDRESS if client_options: if type(client_options) ==", "~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The complete policy to be applied to the", "be used to track the progress of preparing the new", ">>> >>> name = client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]') >>> >>> response =", "for which a decrease in the instance's allocation has been", "results >>> for element in client.list_instance_configs(parent): ... # process element", "field_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets information about a", "of ``Instance`` fields that should be returned. If absent, all", "google.api_core.page_iterator import google.api_core.path_template import grpc from google.cloud.spanner_admin_instance_v1.gapic import enums from", "API continues to give the pre-request resource levels. Upon completion", ">>> name = client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]') >>> >>> response = client.get_instance_config(name)", "begins for all successfully-allocated resources (some types may have lower", "# You may obtain a copy of the License at", "if \"list_instance_configs\" not in self._inner_api_calls: self._inner_api_calls[ \"list_instance_configs\" ] = google.api_core.gapic_v1.method.wrap_method(", "metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) def delete_instance(", "metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instance_configs\"], retry=retry, timeout=timeout, metadata=metadata,", "has a name. 
- ``name:Howl`` --> The instance's name contains", "ID of the instance to create. Valid identifiers are of", "TODO: Initialize `resource`: >>> resource = '' >>> >>> response", "preparing it to begin serving. The returned ``long-running operation`` can", "self._inner_api_calls[\"update_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client,", "`instance`: >>> instance = {} >>> >>> response = client.create_instance(parent,", "page in client.list_instance_configs(parent).pages: ... for element in page: ... #", "instance through which to make calls. This argument is mutually", "via the API. - The instance can be deleted. -", "a dict is provided, it must be of the same", "specified must be ``<parent>/instances/<instance_id>``. If a dict is provided, it", "is readable via the API, with all requested attributes but", "Args: instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to update, which", ">>> >>> # TODO: Initialize `policy`: >>> policy = {}", "interface for this client. This is the key used to", "is provided, it must be of the same form as", "via the API. The returned ``long-running operation`` will have a", "transport methods, wrapped with `wrap_method` to add retry, # timeout,", "API. - The instance can be deleted. - All other", "an instance and begins preparing it to begin serving. The", "insensitive. The fields eligible for filtering are: - ``name`` -", "None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( gapic_version=_GAPIC_LIBRARY_VERSION ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION", "the returned operation: - Billing for all successfully-allocated resources begins", "the method. Returns: A :class:`~google.api_core.operation.Operation` instance. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the", "2020 Google LLC # # Licensed under the Apache License,", ":class:`~google.api_core.operation.Operation` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any", "the instance via the API continues to give the pre-request", "of the returned operation: - Billing begins for all successfully-allocated", "The instance's name contains \"howl\" and it has the label", "argument and the default transport class as the second argument.", "Deletes an instance. Immediately upon completion of the request: -", "def set_iam_policy( self, resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\"", "iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instances\"], retry=retry, timeout=timeout, metadata=metadata, ),", "certain Cloud Platform services (such as Projects) might reject them.", "are: - ``name:*`` --> The instance has a name. -", "IAM. If a dict is provided, it must be of", "affect each other. For example, if one database in an", "string along with API requests. If ``None``, then default info", "and 64 characters in length. instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The", "fields in ``Instance`` should be updated. 
The field mask must", "name, field_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Gets information about", "resource)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header )", "timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): \"\"\" Returns permissions that the caller has", "= instance_admin_grpc_transport.InstanceAdminGrpcTransport( address=api_endpoint, channel=channel, credentials=credentials ) if client_info is None:", "resources (some types may have lower than the requested levels).", "field_mask=field_mask ) if metadata is None: metadata = [] metadata", "): \"\"\" Updates an instance, and begins allocating or releasing", "in ``Instance`` from being erased accidentally by clients that do", "of the response using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If", "returned. If absent, all ``Instance`` fields are returned. If a", "databases in one instance will not affect other instances. However,", "self._inner_api_calls: self._inner_api_calls[ \"test_iam_permissions\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, default_retry=self._method_configs[\"TestIamPermissions\"].retry, default_timeout=self._method_configs[\"TestIamPermissions\"].timeout, client_info=self._client_info,", "DEPRECATED. A ``Channel`` instance through which to make calls. This", "The ``metadata`` field type is ``CreateInstanceMetadata``. The ``response`` field type", "= '' >>> >>> # TODO: Initialize `policy`: >>> policy", "the pre-request resource levels. Upon completion of the returned operation:", "should be updated. The field mask must always be specified;", "\"\"\"The default address of the service.\"\"\" # The name of", "for page in client.list_instances(parent).pages: ... 
for element in page: ...", "request = spanner_instance_admin_pb2.ListInstancesRequest( parent=parent, page_size=page_size, filter=filter_ ) if metadata is", "instances. You can also iterate over the pages of the", "and *all of its databases* immediately and irrevocably disappear from", ">>> >>> client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_config_path('[PROJECT]',", "permanently deleted. Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>>", "client = spanner_admin_instance_v1.InstanceAdminClient() >>> >>> name = client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]') >>>", ":class:`~google.api_core.page_iterator.PageIterator` instance. An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances. You can also", "readable via the API, with all requested attributes but no", "and timeout logic. if \"update_instance\" not in self._inner_api_calls: self._inner_api_calls[ \"update_instance\"", "self._inner_api_calls = {} # Service calls def create_instance( self, parent,", "used to retry requests. If ``None`` is specified, requests will", "information about a particular instance configuration. Example: >>> from google.cloud", "Args: parent (str): Required. The name of the project in", "metadata that is provided to the method. Returns: A :class:`~google.api_core.page_iterator.PageIterator`", ") metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instances\"], retry=retry, timeout=timeout,", "about a particular instance. 
Example: >>> from google.cloud import spanner_admin_instance_v1", "response_token_field=\"next_page_token\", ) return iterator def get_instance_config( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT,", "(str): An expression for filtering the results of the request.", "= google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls[\"get_instance_config\"]( request, retry=retry, timeout=timeout,", "be of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance`", "of the request. Filter rules are case insensitive. The fields", "configurations for a given project. Example: >>> from google.cloud import", "@classmethod def instance_config_path(cls, project, instance_config): \"\"\"Return a fully-qualified instance_config string.\"\"\"", "in client.list_instance_configs(parent): ... # process element ... pass >>> >>>", "kwargs: Additional arguments to pass to the constructor. Returns: InstanceAdminClient:", "self, transport=None, channel=None, credentials=None, client_config=None, client_info=None, client_options=None, ): \"\"\"Constructor. Args:", "which a list of instances is requested. Values are of", "\"License\"); # you may not use this file except in", "google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls[\"list_instances\"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field=\"instances\",", "the instance's tables. - The instance's new resource levels are", "# Alternatively: >>> >>> # Iterate over results one page", "in client.list_instances(parent): ... # process element ... pass >>> >>>", "attempt to ascertain the credentials from the environment. 
This argument", "\"test_iam_permissions\" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, default_retry=self._method_configs[\"TestIamPermissions\"].retry, default_timeout=self._method_configs[\"TestIamPermissions\"].timeout, client_info=self._client_info, ) request", "operations_pb2 from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 _GAPIC_LIBRARY_VERSION", "field. permissions (list[str]): The set of permissions to check for", "default_timeout=self._method_configs[\"SetIamPolicy\"].timeout, client_info=self._client_info, ) request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) if metadata is", "doing so will raise an exception. client_config (dict): DEPRECATED. A", "object for specifying options to ``GetIamPolicy``. This field is only", "# Iterate over all results >>> for element in client.list_instance_configs(parent):", "self._inner_api_calls[\"create_instance\"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client,", "Required. The ID of the instance to create. Valid identifiers", "Platform services (such as Projects) might reject them. If a", "the default transport class as the second argument. channel (grpc.Channel):", "def create_instance( self, parent, instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ):", "client using the provided credentials file. Args: filename (str): The", "afterward: - The instance and *all of its databases* immediately", "to set this if you're developing your own client library.", "with databases in one instance will not affect other instances.", "timeout, and the like. self._inner_api_calls = {} # Service calls", "completion of the returned operation: - Billing begins for all", ":class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any", "import field_mask_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(\"google-cloud-spanner\").version class InstanceAdminClient(object): \"\"\" Cloud Spanner", "Immediately upon completion of this request: - The instance is", "Example: >>> from google.cloud import spanner_admin_instance_v1 >>> >>> client =", "credentials as the first argument and the default transport class", "options to ``GetIamPolicy``. This field is only used by Cloud", "in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used to", "= client_options.api_endpoint # Instantiate the transport. # The transport is", "fully-qualified instance_config string.\"\"\" return google.api_core.path_template.expand( \"projects/{project}/instanceConfigs/{instance_config}\", project=project, instance_config=instance_config, ) @classmethod", "of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions` retry", "as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object", "an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to", "of the same form as the protobuf message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` retry", "(Optional[float]): The amount of time, in seconds, to wait for", "the form ``projects/<project>/instances/<instance>``. field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If field_mask is present,", "- ``name:*`` --> The instance has a name. - ``name:Howl``", "API.\"\"\" import functools import pkg_resources import warnings from google.oauth2 import", "Additional metadata that is provided to the method. Returns: A", "for the request to complete. Note that if ``retry`` is", "pages of the response using its `pages` property. 
Raises: google.api_core.exceptions.GoogleAPICallError:", "that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance.", "exists but does not have a policy set. Authorization requires", "= {} >>> >>> # TODO: Initialize `field_mask`: >>> field_mask", "a name of the format ``<instance_name>/operations/<operation_id>`` and can be used", "pass to the constructor. Returns: InstanceAdminClient: The constructed client. \"\"\"" ]
[ "Forward input file does not end in _R1. ', forward_data))", "data from our own objects if stage == 'gtype': pass", "etree.Element('input', type='repeat_region', order='3', unit='CCT', start=str(cctlen), end=str(cctlen)) tp_input = etree.Element('input', type='threeprime',", "RV references have identical filenames. Will create indexing issue.')) trigger", "was populated, create dictionary, Append keys if children: dd =", "if not quality_threshold.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "Colour.end, 'XML Config: Given demultiplexing reverse adapter position invalid! [5P,", "end in _R1. ', forward_data)) sys.exit(2) ## ## Check reverse", "or N.')) continue else: break if purge_choice.lower() == 'y': log.info('{}{}{}{}{}'.format(Colour.bold,", "is not a valid integer.')) trigger = True ## ##", "Colour.end, 'XML Config: Demultiplexing flag is not set to True/False.'))", "trigger def extract_data(input_data_directory): target_files = glob.glob(os.path.join(input_data_directory, '*')) for extract_target in", "## try for -c style, except AttributeError for -b style", "self class DataLoader: def __init__(self, database, descriptor): self.database = database", "= self.config_dict['instance_flags']['@atypical_realignment'] if not (atypical_flag == 'True' or atypical_flag ==", "'Creating instance run directory.. ')) mkdir_p(run_dir) ## Inform user it's", "buffer size to slice from above array scraping_buffer = 8", "'shd__ ', Colour.end, 'XML Config: Specified forward reference file is", "not error_rate.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified error_rate", "not file_count % 2 == 0: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "is not 0.0 < x < 1.0.')) trigger = True", "= True return trigger def extract_data(input_data_directory): target_files = glob.glob(os.path.join(input_data_directory, '*'))", "'y' or 'yes' will yield True. 
\"\"\" boolean_value = string.lower(boolean_value)", "+ k, v) for k, v in t.attrib.items()) if t.text:", "reverse_reference = self.config_dict['@reverse_reference'] if not os.path.isfile(reverse_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "Config: Failure, exiting.')) sys.exit(2) else: log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'XML", "glob.glob(os.path.join(data_path, '*')) sorted_input = sorted(input_files) sequence_pairs = [] file_count =", "'XML Config: Specified forward reference file is not a fa/fas", "== 'y') and not (purge_choice.lower() == 'n'): log.info('{}{}{}{}'.format(Colour.red, 'shd__ ',", "not isinstance(float(chain_drop), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "configuration file reader. Opens a configuration file, and if valid,", "jobname: target_output = os.path.join(output_root, jobname) if not os.path.exists(target_output): log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__", "## ## Alignment flag settings if alignment_flag == 'True': min_seed_length", "'y': log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Clearing pre-existing Jobname Prefix: ',", "reverse_adapter = self.config_dict['trim_flags']['@reverse_adapter'] for charbase in reverse_adapter: if charbase not", "flag is not True/False.')) trigger = True ## ## Demultiplexing", "class ConfigReader(object): \"\"\" The configuration file reader. 
Opens a configuration", "self.config_dict[list(self.config_dict.keys())[0]] def validate_config(self): \"\"\" Method which validates the configuration file's", "os.path.join(instance_rundir, sample_root, 'Predict') file_pair[sample_root] = [forward_data, reverse_data, instance_path, seq_qc_path, align_path,", "XML data_root = etree.Element('data') loci_root = etree.Element('loci', label=allele_object.get_reflabel()); data_root.append(loci_root) ##", "is(are) invalid.')) trigger=True prime_clipping_penalty_raw = self.config_dict['alignment_flags']['@prime_clipping_penalty'] prime_clipping_penalty = prime_clipping_penalty_raw.split(',') for", "if type(input_object) is int: if stage == 3: cleanse_target =", "= self.config_dict['alignment_flags']['@seeded_chain_drop'] if not seeded_chain_drop.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "real directory ## Generates folder name based on date (for", "Colour.end, 'XML Config: Specified mismatch_penalty integer is invalid.')) trigger=True indel_penalty_raw", "into one file if direction == 'fw': toutfi = open(temp_output,", "could not be found.')) trigger = True for xmlfile in", "detected in FW adapter sequence.')) trigger = True reverse_adapter =", "trigger = True ## ## Trimming flag settings if sequence_qc_flag", "Get input files from data path ## Sort so that", "while True: purge_choice = input('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Job folder", "Config: Parsing parameters successful!')) class DataClump(dict): \"\"\"Container object for datasets:", "for ## AttributeError in the situation where instance_params origin differs", "lxml import etree from reportlab.pdfgen import canvas class Colour: def", "binary: ', binary, '!')) raise NameError ## ## To determine", "mismatch_penalty = self.config_dict['alignment_flags']['@mismatch_penalty'] if not mismatch_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "doesn't exist, make it ## Then make 
the run directory", "', Colour.end, 'XML Config: Specified forward reference file is not", "settings dictionary is full of valid settings! \"\"\" trigger =", "= le.transform(labels) return DataClump(DATA=data, TARGET=hash_int_labels, FTRNAME=feature_names[:-1], DESCR=descr_text, ENCDR=le) def parse_boolean(boolean_value):", "'t', 'y' or 'yes' will yield True. \"\"\" boolean_value =", "forward reference file could not be found.')) trigger = True", "extract_target in target_files: unzipd = subprocess.Popen(['gzip', '-q', '-f', '-d', extract_target],", "AP]')) trigger = True reverse_adapter = self.config_dict['demultiplex_flags']['@reverse_adapter'] for charbase in", "= input_xml.split('/')[-1].split('.')[0] target_output = os.path.join(index_path, label + '.fa') temp_output =", "if not maximum_length == '': if not maximum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "value is not an integer.')) trigger = True if trigger:", "+ '_concat.fa') gen_process = subprocess.Popen(['generatr', '-i', input_xml, '-o', target_output], stdout=subprocess.PIPE,", "= etree.Element('input', type='threeprime', flank=tp_flank) for node in [fp_input, cag_region, intervening,", "None: log.error(\"No configuration file specified!\") else: self.config_file = etree.parse(self.config_filename) ##", "self.config_dict['alignment_flags']['@gap_extend_penalty'] gap_extend_penalty = gap_extend_penalty_raw.split(',') for individual_gaextend in gap_extend_penalty: if not", "Nodes fp_input = etree.Element('input', type='fiveprime', flank=fp_flank) cag_region = etree.Element('input', type='repeat_region',", "except NameError: trigger=True if genotyping == 'True': try:type_func('samtools') except NameError:", "purge_choice.lower() == 'y': log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Clearing pre-existing Jobname", "Specified quality threshold integer out of range (0-38).')) trigger =", "extract_target], stderr=subprocess.PIPE) unzipd.wait() return True def 
sequence_pairings(data_path, instance_rundir): ## ##", "seeded_chain_drop = self.config_dict['alignment_flags']['@seeded_chain_drop'] if not seeded_chain_drop.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "Colour.end, 'Creating output root... ')) mkdir_p(output_root) run_dir = os.path.join(output_root, 'ScaleHDRun_'+today)", "binary_result = [] binary_string = 'type {}'.format(binary) binary_subprocess = subprocess.Popen([binary_string],", "True if not (forward_reference.endswith('.fa') or forward_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "dtd_file.close() def set_dictionary(self): \"\"\" Takes the now validated XML and", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file is not an", "data[i] = d[:-1] label = d[-1] labels.append(label) le = preprocessing.LabelEncoder()", "= len(sorted_input) if not file_count % 2 == 0: log.error('{}{}{}{}'.format(Colour.red,", "prefix: ', jobname)) run_dir = os.path.join(output_root, jobname) mkdir_p(run_dir) else: purge_choice", "from csv, into objects in preparation for bunch() data_file_name =", "genotyping = instance_params['genotype_prediction'] snp_calling = instance_params['snp_calling'] if quality_control == 'True':", "quality threshold integer is invalid.')) trigger = True elif not", "where instance_params origin differs ## try for -c style, except", "True trim_adapters = ['-a','-g','-a$','-g^','-b'] adapter_flag = self.config_dict['trim_flags']['@adapter_flag'] if not (adapter_flag", "## Ensures root output is a real directory ## Generates", "try for -c style, except AttributeError for -b style try:", "for variables within the pipeline. Recursion adapted from http://stackoverflow.com/a/9286702 \"\"\"", "is invalid.')) trigger=True indel_penalty_raw = self.config_dict['alignment_flags']['@indel_penalty'] indel_penalty = indel_penalty_raw.split(',') for", "a directory or file. 
If the path exists, True is", "if not check_input_files('.xml',xmlfile): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file", "integer is invalid.')) trigger=True chain_drop = self.config_dict['alignment_flags']['@chain_drop'] if not isinstance(float(chain_drop),", "Instance flag settings demultiplexing_flag = self.config_dict['instance_flags']['@demultiplex'] if not (demultiplexing_flag ==", "is not a valid integer.')) trigger = True maximum_length =", "in mutate_list: loc = mutate_list.index(target_fqfile) mutate_list[loc] = altered_path return mutate_list", "__author__ = '<EMAIL>' ## ## Imports import string import os", "## Sort so that ordering isn't screwy on linux input_files", "v) for k, v in t.attrib.items()) if t.text: text =", "'Adapter' or trimming_type == 'Both'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "output_root = output_argument[0] if jobname: target_output = os.path.join(output_root, jobname) if", "Specified forward reference file could not be found.')) trigger =", "if not (trimming_type == 'Quality' or trimming_type == 'Adapter' or", "', Colour.end, 'XML Config: Specified min_length is not a valid", "= self.config_dict['alignment_flags']['@mismatch_penalty'] if not mismatch_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "= database self.descriptor = descriptor def load_model(self): ## Loads description", "sample_root = '_'.join(forward_data_name.split('_')[:-1]) instance_path = os.path.join(instance_rundir) seq_qc_path = os.path.join(instance_rundir, sample_root,", "exists, True is returned. 
If the path does not exist,", "not quality_threshold.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified quality", "be found.')) trigger = True for xmlfile in parsed_arguments.config: if", "directory for datetime if not os.path.exists(output_root): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end,", "== 'Adapter' or trimming_type == 'Both'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "with open(input_report_file, 'r') as trpf: trim_lines = trpf.readlines() ## ##", "'shd__ ', Colour.end, 'XML Config: Invalid character detected in RV", "== 'True' or demultiplexing_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "is invalid.')) trigger=True skip_seed_with_occurrence = self.config_dict['alignment_flags']['@skip_seed_with_occurrence'] if not skip_seed_with_occurrence.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "', Colour.end, 'XML Config: Specified seeded_chain_drop integer is invalid.')) trigger=True", "'shd__ ', Colour.end, 'XML Config: FW and RV references have", "trigger=True seq_match_score = self.config_dict['alignment_flags']['@seq_match_score'] if not seq_match_score.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Genotype Prediction control flag", "## since we already have the data from our own", "sys import glob import datetime import subprocess import logging as", "indel_penalty: if not individual_indelpen.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "if not forward_data_name.endswith('_R1'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Forward input file does not", "trim_lines[summary_start:summary_start + scraping_buffer] trpf.close() return summary_data[2:] ## ## If the", "unit='CCG', start=ccgstart, end=ccgend) cct_region = etree.Element('input', type='repeat_region', order='3', 
unit='CCT', start=str(cctlen),", "'r') as alnrpf: align_lines = alnrpf.readlines() alnrpf.close() ## ## No", "trigger=True seeded_chain_drop = self.config_dict['alignment_flags']['@seeded_chain_drop'] if not seeded_chain_drop.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "'.join(cleanse_target) else: return '*' def mkdir_p(path): try: os.makedirs(path) except OSError", "'shd__ ', Colour.end, 'XML Config: Specified band_width integer is invalid.'))", "folder name based on date (for run ident) date =", "if not os.path.exists(data_directory): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "No need to tidy up report for genotyping ## since", "to dictionary, validate vs ruleset self.validate_against_dtd() self.set_dictionary() self.validate_config() def validate_against_dtd(self):", "== 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP Calling", "d[-1] labels.append(label) le = preprocessing.LabelEncoder() le.fit(labels) hash_int_labels = le.transform(labels) return", "trpf.close() return summary_data[2:] ## ## If the argument input_report_file is", "instance_rundir): ## ## Get input files from data path ##", "the parameters within the file to a dictionary object, reader", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified chain_drop float is", "not be found.')) trigger = True for xmlfile in parsed_arguments.config:", "target_fqfile, altered_path): if target_fqfile in mutate_list: loc = mutate_list.index(target_fqfile) mutate_list[loc]", "Specified error tolerance is not 0.0 < x < 1.0.'))", "flag settings if alignment_flag == 'True': min_seed_length = self.config_dict['alignment_flags']['@min_seed_length'] if", "for i in range(0, len(trim_lines)): if '== Summary ==' in", "target_files = glob.glob(os.path.join(input_data_directory, '*')) for extract_target in target_files: if extract_target.lower().endswith(('.fq.gz',", "= 
self.config_dict['alignment_flags']['@seq_match_score'] if not seq_match_score.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "not True/False.')) trigger = True snpcall_flag = self.config_dict['instance_flags']['@snp_calling'] if not", "up report for genotyping ## since we already have the", "from io import StringIO import PyPDF2 from sklearn import preprocessing", "'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Sequence Alignment flag", "= temp_output return target_output def seek_target(input_list, target): for i in", "shells/config files def type_func(binary): binary_result = [] binary_string = 'type", "' ' or character is '/': log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified Job Name", "string provided by parameter string is empty. False indicates the", "direction): ##TODO docstring label = input_xml.split('/')[-1].split('.')[0] target_output = os.path.join(index_path, label", "viewed through accessing the config_dict variable. \"\"\" def __init__(self, scriptdir,", "trigger = True if trigger: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "if parsed_arguments.jobname: for character in parsed_arguments.jobname: if character is '", "trimming.. 
if stage == 'trim': with open(input_report_file, 'r') as trpf:", "if not isinstance(float(seed_length_extension), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "charbase in reverse_adapter: if charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__", "not (purge_choice.lower() == 'n'): log.info('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Invalid input.", "input directory.')) trigger = True forward_reference = self.config_dict['@forward_reference'] if not", "==' in trim_lines[i]: summary_start = i ## ## Slice and", "\"\"\" def recursive_generation(t): d = {t.tag: {} if t.attrib else", "Alignment flag is not set to True/False.')) trigger = True", "'shd__ ', Colour.end, 'Invalid input. Please input Y or N.'))", "input_report_file): ## ## If the argument input_report_file is from trimming..", "ranges required, only skip first line return align_lines[1:] ## ##", "integer(s) is(are) invalid.')) trigger=True unpaired_pairing_penalty = self.config_dict['alignment_flags']['@unpaired_pairing_penalty'] if not unpaired_pairing_penalty.isdigit():", "Specified quality threshold integer is invalid.')) trigger = True elif", "No ranges required, only skip first line return align_lines[1:] ##", "## Determine buffer size to slice from above array scraping_buffer", "found'.encode() in binary_result[0] or binary_result[1]: log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Missing binary: ', binary,", "reverse_adapter: if charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "allele_object.get_reflabel())) fp_flank = 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC' cagstart = ''; cagend = ''", "hash_int_labels = le.transform(labels) return DataClump(DATA=data, TARGET=hash_int_labels, FTRNAME=feature_names[:-1], DESCR=descr_text, ENCDR=le) def", "string.lower(boolean_value) in ('yes', 'y', 'true', 't', '1') return boolean_value def", "trigger=True 
return trigger def sanitise_outputs(jobname, output_argument): run_dir = '' output_root", "\"', character, '\"')) trigger = True ## ## Config mode", "if trigger: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Failure, exiting.'))", "k, v in dd.items()}} ## ## Values for key if", "= etree.tostring(self.config_file, pretty_print=True) element_tree = cElementTree.XML(string_repr) self.config_dict = recursive_generation(element_tree) self.config_dict", "'shd__ ', Colour.end, 'Job folder already exists. Delete existing folder?", "of XML structure \"\"\" ## ## Open > etree.DTD object", "existing folder? Y/N: ')) if not (purge_choice.lower() == 'y') and", "except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(path):", "except NameError: trigger=True if alignment == 'True': try:type_func('seqtk') except NameError:", "unsupported files present so, quit \"\"\" trigger = False ##", "and not (purge_choice.lower() == 'n'): log.info('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Invalid", "elif not int(quality_threshold) in range(0,39): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "full of valid settings! 
\"\"\" trigger = False ## ##", "= '\\033[1m' underline = '\\033[4m' end = '\\033[0m' class ConfigReader(object):", "config file could not be found.')) trigger = True for", "not (demultiplexing_flag == 'True' or demultiplexing_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "= indel_penalty_raw.split(',') for individual_indelpen in indel_penalty: if not individual_indelpen.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "(purge_choice.lower() == 'y') and not (purge_choice.lower() == 'n'): log.info('{}{}{}{}'.format(Colour.red, 'shd__", "etree.tostring(data_root, pretty_print=True) with open(atypical_path, 'w') as xmlfi: xmlfi.write(s.decode()) xmlfi.close() return", "trigger = True ## ## Alignment flag settings if alignment_flag", "filesystem_exists_check and check_input_files if either return false, path is invalid", "True reverse_adapter = self.config_dict['trim_flags']['@reverse_adapter'] for charbase in reverse_adapter: if charbase", "'SeqQC') align_path = os.path.join(instance_rundir, sample_root, 'Align') predict_path = os.path.join(instance_rundir, sample_root,", "## If the argument input_report_file is from trimming.. if stage", "check if parsed_arguments.jobname: for character in parsed_arguments.jobname: if character is", "invalid.')) trigger=True seq_match_score = self.config_dict['alignment_flags']['@seq_match_score'] if not seq_match_score.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "string contents. 
For example, a string with 'true', 't', 'y'", "character detected in FW adapter sequence.')) trigger = True reverse_adapter", "run directory for datetime if not os.path.exists(output_root): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ',", "invalid characters: \"', character, '\"')) trigger = True ## ##", "try:type_func('cutadapt') except NameError: trigger=True if alignment == 'True': try:type_func('seqtk') except", "ends with R2 reverse_data_name = sorted_input[i+1].split('/')[-1].split('.')[0] if not reverse_data_name.endswith('_R2'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__", "'shd__ ', Colour.end, 'Creating instance run directory.. ')) mkdir_p(run_dir) ##", "doc, puts through generator, returns dictionary string_repr = etree.tostring(self.config_file, pretty_print=True)", "dictionary string_repr = etree.tostring(self.config_file, pretty_print=True) element_tree = cElementTree.XML(string_repr) self.config_dict =", "XML structure \"\"\" ## ## Open > etree.DTD object dtd_file", "the formatted xml doc, puts through generator, returns dictionary string_repr", "True forward_reference = self.config_dict['@forward_reference'] if not os.path.isfile(forward_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "np.empty((n_samples, n_features)) temp = next(data_file) feature_names = np.array(temp) labels =", "the string provided by parameter string is empty. 
False indicates", "Then make the run directory for datetime if not os.path.exists(output_root):", "= self.config_dict['alignment_flags']['@band_width'] if not band_width.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "False ## ## Subfunction for recycling code ## Calls UNIX", "= '20' ## ## Create XML data_root = etree.Element('data') loci_root", "file specified!\") else: self.config_file = etree.parse(self.config_filename) ## ## Check config", "output_argument[0] if jobname: target_output = os.path.join(output_root, jobname) if not os.path.exists(target_output):", "If list was populated, create dictionary, Append keys if children:", "trigger=True seed_length_extension = self.config_dict['alignment_flags']['@seed_length_extension'] if not isinstance(float(seed_length_extension), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "pass, guarantees that the settings dictionary is full of valid", "trigger = True if not float(error_tolerance) in np.arange(0,1.1,0.01): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "min_overlap is not a valid integer.')) trigger = True minimum_length", "'!')) raise NameError ## ## To determine which binaries to", "if not os.path.exists(output_root): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating output root...", "', Colour.end, 'XML Config: Given demultiplexing forward adapter position invalid!", "in everywhere else in pipeline sample_root = '_'.join(forward_data_name.split('_')[:-1]) instance_path =", "from collections import defaultdict from xml.etree import cElementTree from lxml", "'\\033[93m' red = '\\033[91m' bold = '\\033[1m' underline = '\\033[4m'", "recursive_generation(t): d = {t.tag: {} if t.attrib else None} children", "control flag is not set to True/False.')) trigger = True", "invalid.')) trigger=True gap_extend_penalty_raw = self.config_dict['alignment_flags']['@gap_extend_penalty'] gap_extend_penalty = gap_extend_penalty_raw.split(',') for 
individual_gaextend", "trigger = True elif not int(quality_threshold) in range(0,39): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "exposes its keys as attributes.\"\"\" def __init__(self, **kwargs): dict.__init__(self, kwargs)", "not os.path.exists(target_output): log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating Output with prefix:", "self.config_dict['instance_flags']['@quality_control'] if not (sequence_qc_flag == 'True' or sequence_qc_flag == 'False'):", "for extract_target in target_files: if extract_target.lower().endswith(('.fq.gz', '.fastq.gz')): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ',", "without pairing!')) sys.exit(2) ## ## Optimise so code isn't recycled", "to delete pre-existing Job folder. Cannot write output.') else: ##", "'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC' cagstart = ''; cagend = '' intv = allele_object.get_intervening()", "{t.tag: {} if t.attrib else None} children = list(t) ##", "the configuration file's contents. If all pass, guarantees that the", "self.config_dict['alignment_flags']['@chain_drop'] if not isinstance(float(chain_drop), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified chain_drop float", "in range(0, len(sorted_input), 2): file_pair = {} forward_data = sorted_input[i]", "defaultdict(list) for dc in map(recursive_generation, children): for k, v in", "self.config_dict['@forward_reference'] if not os.path.isfile(forward_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "temp = next(data_file) feature_names = np.array(temp) labels = [] for", "with R2 reverse_data_name = sorted_input[i+1].split('/')[-1].split('.')[0] if not reverse_data_name.endswith('_R2'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O:", "''; ccgend = '' ccglen = allele_object.get_ccg() cctlen = allele_object.get_cct()", "'G', 'C', 'T'] 
if demultiplexing_flag == 'True': forward_adapter = self.config_dict['demultiplex_flags']['@forward_adapter']", "TARGET=hash_int_labels, FTRNAME=feature_names[:-1], DESCR=descr_text, ENCDR=le) def parse_boolean(boolean_value): \"\"\" Given a string", "with open(data_file_name) as f: data_file = csv.reader(f) temp = next(data_file)", "NameError: trigger=True return trigger def sanitise_outputs(jobname, output_argument): run_dir = ''", "trigger = True trim_adapters = ['-a','-g','-a$','-g^','-b'] adapter_flag = self.config_dict['trim_flags']['@adapter_flag'] if", "genotype_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Genotype", "', Colour.end, 'XML Config: Specified error tolerance is not a", "individual_gaextend in gap_extend_penalty: if not individual_gaextend.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "False def initialise_libraries(instance_params): trigger = False ## ## Subfunction for", "demultiplexing forward adapter position invalid! 
[5P, 3P, AP]')) trigger =", "os.path.exists(data_directory): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified data directory", "## Trimming flag settings if sequence_qc_flag == 'True': trimming_type =", "not an integer.')) trigger = True if trigger: log.error('{}{}{}{}'.format(Colour.red, 'shd__", "Simple check to see if the string provided by parameter", "from reportlab.pdfgen import canvas class Colour: def __init__(self): pass purple", "character is '/': log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified Job Name has invalid characters:", "= self.config_dict['alignment_flags']['@min_seed_length'] if not min_seed_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "if not error_rate.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "ccgend = '20' if direction == 'rv': cagstart = '100';", "Config: Specified seed_length_extension float is invalid.')) trigger=True skip_seed_with_occurrence = self.config_dict['alignment_flags']['@skip_seed_with_occurrence']", "True if trigger: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Failure,", "If the argument input_report_file is from trimming.. if stage ==", "\"\"\" trigger = False ## ## Main configuration instance settings", "not mismatch_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified mismatch_penalty", "'\"')) trigger = True ## ## Config mode check if", "demultiplexing_flag == 'True': forward_adapter = self.config_dict['demultiplex_flags']['@forward_adapter'] for charbase in forward_adapter:", "scraping_buffer] trpf.close() return summary_data[2:] ## ## If the argument input_report_file", "invalid! 
[5P, 3P, AP]')) trigger = True error_rate = self.config_dict['demultiplex_flags']['@error_rate']", "## Demultiplexing flag settings trim_adapter_base = ['A', 'G', 'C', 'T']", "Exception('User chose not to delete pre-existing Job folder. Cannot write", "vs ruleset self.validate_against_dtd() self.set_dictionary() self.validate_config() def validate_against_dtd(self): \"\"\" Validate input", "if direction == 'fw': toutfi = open(temp_output, 'w') cat_process =", "AttributeError: quality_control = instance_params['quality_control'] alignment = instance_params['sequence_alignment'] genotyping = instance_params['genotype_prediction']", "will yield True. \"\"\" boolean_value = string.lower(boolean_value) in ('yes', 'y',", "Specified unpaired_pairing_penalty integer is invalid.')) trigger=True ## ## Genotype prediction", "= etree.Element('input', type='intervening', sequence=intv, prior='1') ccg_region = etree.Element('input', type='repeat_region', order='2',", "Colour.end, 'XML Config: Specified seeded_chain_drop integer is invalid.')) trigger=True seq_match_score", "'shd__ ', Colour.end, 'Creating Output with prefix: ', jobname)) run_dir", "see if the path, specified by parameter path, exists. 
Can", "alignment == 'True': try:type_func('seqtk') except NameError: trigger=True try:type_func('bwa') except NameError:", "'XML Config: Specified error_rate is not a valid integer.')) trigger", "a dictionary object, reader to be viewed through accessing the", "def seek_target(input_list, target): for i in range(0, len(input_list)): if target", "Instance variables self.scriptdir = scriptdir self.config_filename = config_filename self.dtd_filename =", "## Then make the run directory for datetime if not", "above array scraping_buffer = 8 if '-q' in trim_lines[1]: scraping_buffer", "Config: Specified unpaired_pairing_penalty integer is invalid.')) trigger=True ## ## Genotype", "Config: Specified min_overlap is not a valid integer.')) trigger =", "target_fqfile in mutate_list: loc = mutate_list.index(target_fqfile) mutate_list[loc] = altered_path return", "Config: Specified skip_seed_with_occurrence integer is invalid.')) trigger=True chain_drop = self.config_dict['alignment_flags']['@chain_drop']", "in dc.items(): dd[k].append(v) d = {t.tag: {k: v[0] if len(v)", "mutate_list[loc] = altered_path return mutate_list def scrape_summary_data(stage, input_report_file): ## ##", "order='3', unit='CCT', start=str(cctlen), end=str(cctlen)) tp_input = etree.Element('input', type='threeprime', flank=tp_flank) for", "with 'true', 't', 'y' or 'yes' will yield True. \"\"\"", "yield True. 
\"\"\" boolean_value = string.lower(boolean_value) in ('yes', 'y', 'true',", "trigger = True genotype_flag = self.config_dict['instance_flags']['@genotype_prediction'] if not (genotype_flag ==", "flag is not set to True/False.')) trigger = True sequence_qc_flag", "prediction flag settings if genotype_flag == 'True': snp_observation_pcnt = self.config_dict['prediction_flags']['@snp_observation_threshold']", "csv from io import StringIO import PyPDF2 from sklearn import", "ccgstart = '1'; ccgend = '20' if direction == 'rv':", "not in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified prime_clipping_penalty integer(s) is(are)", "self.config_dict['instance_flags']['@demultiplex'] if not (demultiplexing_flag == 'True' or demultiplexing_flag == 'False'):", "== 'True': try:type_func('java') except NameError: trigger=True try:type_func('fastqc') except NameError: trigger=True", "skip first line return align_lines[1:] ## ## No need to", "forward_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified forward reference", "= self.config_dict['instance_flags']['@snp_calling'] if not (snpcall_flag == 'True' or snpcall_flag ==", "genotyping ## since we already have the data from our", "indel_penalty = indel_penalty_raw.split(',') for individual_indelpen in indel_penalty: if not individual_indelpen.isdigit():", "False ## ## Main configuration instance settings data_directory = self.config_dict['@data_dir']", "config against DTD ruleset i.e. confirms conformation of XML structure", "next(data_file) feature_names = np.array(temp) labels = [] for i, d", "is raised - else False is returned. 
\"\"\" if os.path.lexists(path):", "path could not be found.')) return False def check_input_files(input_format, input_file):", "not set to True/False.')) trigger = True alignment_flag = self.config_dict['instance_flags']['@sequence_alignment']", "stage == 'align': with open(input_report_file, 'r') as alnrpf: align_lines =", "'\\033[92m' yellow = '\\033[93m' red = '\\033[91m' bold = '\\033[1m'", "'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Given demultiplexing forward", "so code isn't recycled for i in range(0, len(sorted_input), 2):", "## ## Open > etree.DTD object dtd_file = open(self.dtd_filename, 'r')", "reverse_data_name = sorted_input[i+1].split('/')[-1].split('.')[0] if not reverse_data_name.endswith('_R2'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Reverse input", "stdout=toutfi, stderr=subprocess.PIPE) cat_process.wait() toutfi.close() target_output = temp_output return target_output def", "quality_cutoff = self.config_dict['prediction_flags']['@quality_cutoff'] if not quality_cutoff.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "if len(v) == 1 else v for k, v in", "fqfile in glob.glob(os.path.join(data_directory, '*')): if not (fqfile.endswith('.fq') or fqfile.endswith('.fastq') or", "return target_output def seek_target(input_list, target): for i in range(0, len(input_list)):", "Specified seq_match_score integer is invalid.')) trigger=True mismatch_penalty = self.config_dict['alignment_flags']['@mismatch_penalty'] if", "defaultdict from xml.etree import cElementTree from lxml import etree from", "extract_target.lower().endswith(('.fq.gz', '.fastq.gz')): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Detected compressed input data.", "attributes.\"\"\" def __init__(self, **kwargs): dict.__init__(self, kwargs) self.__dict__ = self class", "parameters successful!')) class DataClump(dict): \"\"\"Container object for datasets: dictionary-like object", 
"log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Missing binary: ', binary, '!')) raise NameError ## ##", "run_dir def replace_fqfile(mutate_list, target_fqfile, altered_path): if target_fqfile in mutate_list: loc", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified min_overlap is not", "end=cagend) intervening = etree.Element('input', type='intervening', sequence=intv, prior='1') ccg_region = etree.Element('input',", "cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:2] return ' '.join(cleanse_target) else: return '*'", "True: purge_choice = input('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Job folder already", "to slice from above array scraping_buffer = 8 if '-q'", "= True atypical_flag = self.config_dict['instance_flags']['@atypical_realignment'] if not (atypical_flag == 'True'", "input. Please input Y or N.')) continue else: break if", "filenames. Will create indexing issue.')) trigger = True ## ##", "'-q' in trim_lines[1]: scraping_buffer += 1 ## ## Get Anchor", "found.')) trigger = True for xmlfile in parsed_arguments.config: if not", "'XML Config: Invalid character detected in reverse_adapter demultiplexing flag.')) trigger", "which validates the configuration file's contents. 
If all pass, guarantees", "flank=fp_flank) cag_region = etree.Element('input', type='repeat_region', order='1', unit='CAG', start=cagstart, end=cagend) intervening", "## ## Genotype prediction flag settings if genotype_flag == 'True':", "## ## Join typical and atypical reference into one file", "path ## Sort so that ordering isn't screwy on linux", "if raise_exception: raise ValueError(\"Empty string detected!\") return True def sanitise_inputs(parsed_arguments):", "= {} forward_data = sorted_input[i] reverse_data = sorted_input[i+1] ## ##", "forward_adapter = self.config_dict['trim_flags']['@forward_adapter'] for charbase in forward_adapter: if charbase not", "is invalid.')) trigger=True ## ## Genotype prediction flag settings if", "If the argument input_report_file is from alignment.. if stage ==", "self.config_dict['instance_flags']['@sequence_alignment'] if not (alignment_flag == 'True' or alignment_flag == 'False'):", "## Config mode check if parsed_arguments.config: if not filesystem_exists_check(parsed_arguments.config[0]): log.error('{}{}{}{}'.format(Colour.red,", "Specified trimming adapter not valid selection.')) trigger = True forward_adapter", "if not individual_prclip.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "open(self.dtd_filename, 'r') dtd_object = etree.DTD(dtd_file) ## ## If validation fails,", "{}'.format(binary) binary_subprocess = subprocess.Popen([binary_string], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) binary_result = binary_subprocess.communicate()", "'r') as trpf: trim_lines = trpf.readlines() ## ## Determine buffer", "If the path does not exist, and raise_exception is set", "cag_region = etree.Element('input', type='repeat_region', order='1', unit='CAG', start=cagstart, end=cagend) intervening =", "etree.DTD object dtd_file = open(self.dtd_filename, 'r') dtd_object = etree.DTD(dtd_file) ##", "## ## Make Stage outputs for use in everywhere else", 
"self.config_dict['alignment_flags']['@seed_length_extension'] if not isinstance(float(seed_length_extension), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "def validate_config(self): \"\"\" Method which validates the configuration file's contents.", "try:type_func('fastqc') except NameError: trigger=True try:type_func('cutadapt') except NameError: trigger=True if alignment", "'shd__ ', Colour.end, 'XML Config: Given demultiplexing forward adapter position", "## ## No ranges required, only skip first line return", "logging as log import numpy as np import csv from", "today = date + '-' + walltime ## If the", "integer.')) trigger = True if trigger: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "', Colour.end, 'XML Config: Sequence Quality control flag is not", "if 'not found'.encode() in binary_result[0] or binary_result[1]: log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Missing binary:", "\"\"\" Utilises filesystem_exists_check and check_input_files if either return false, path", "log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating Output with prefix: ', jobname))", "XML and extracts information from the tree into a python", "True minimum_length = self.config_dict['demultiplex_flags']['@min_length'] if not minimum_length == '': if", "invalid.')) trigger=True prime_clipping_penalty_raw = self.config_dict['alignment_flags']['@prime_clipping_penalty'] prime_clipping_penalty = prime_clipping_penalty_raw.split(',') for individual_prclip", "raise an error if not dtd_object.validate(self.config_file): dtd_file.close() log.error(\"DTD validation failure", "is empty. False indicates the string is NOT empty. 
Parameter", "loci_root = etree.Element('loci', label=allele_object.get_reflabel()); data_root.append(loci_root) ## ## Loci Nodes fp_input", "check if parsed_arguments.config: if not filesystem_exists_check(parsed_arguments.config[0]): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "', Colour.end, 'XML Config: Specified forward reference file could not", "__init__(self): pass purple = '\\033[95m' cyan = '\\033[96m' darkcyan =", "= True alignment_flag = self.config_dict['instance_flags']['@sequence_alignment'] if not (alignment_flag == 'True'", "'True': snp_observation_pcnt = self.config_dict['prediction_flags']['@snp_observation_threshold'] if not snp_observation_pcnt.isdigit(): if not int(snp_observation_pcnt)", "## Optimise so code isn't recycled for i in range(0,", "not exist, and raise_exception is set to True, an IOError", "## No need to tidy up report for genotyping ##", "'1') return boolean_value def empty_string_check(string, raise_exception=True): \"\"\" Simple check to", "= instance_params['sequence_alignment'] genotyping = instance_params['genotype_prediction'] snp_calling = instance_params['snp_calling'] if quality_control", "one file if direction == 'fw': toutfi = open(temp_output, 'w')", "log.error(\"No configuration file specified!\") else: self.config_file = etree.parse(self.config_filename) ## ##", "= sorted_input[i+1] ## ## Check forward ends with R1 forward_data_name", "Job Name has invalid characters: \"', character, '\"')) trigger =", "the object (memory) and raise an error if not dtd_object.validate(self.config_file):", "log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified Job Name has invalid characters: \"', character, '\"'))", "pre-existing Job folder. 
Cannot write output.') else: ## Ensures root", "alnrpf: align_lines = alnrpf.readlines() alnrpf.close() ## ## No ranges required,", "an XML file.')) trigger = True return trigger def extract_data(input_data_directory):", "as f: descr_text = f.read() ## Loads data set from", "etree.Element('data') loci_root = etree.Element('loci', label=allele_object.get_reflabel()); data_root.append(loci_root) ## ## Loci Nodes", "i.e. confirms conformation of XML structure \"\"\" ## ## Open", "Please input Y or N.')) continue else: break if purge_choice.lower()", "directory or file. If the path exists, True is returned.", "not set to True/False.')) trigger = True atypical_flag = self.config_dict['instance_flags']['@atypical_realignment']", "altered_path return mutate_list def scrape_summary_data(stage, input_report_file): ## ## If the", "integer is invalid.')) trigger = True elif not int(quality_threshold) in", "sample_root, 'Align') predict_path = os.path.join(instance_rundir, sample_root, 'Predict') file_pair[sample_root] = [forward_data,", "self.database with open(data_file_name) as f: data_file = csv.reader(f) temp =", "ccgend = '20' ## ## Create XML data_root = etree.Element('data')", "is invalid.')) trigger=True mismatch_penalty = self.config_dict['alignment_flags']['@mismatch_penalty'] if not mismatch_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "if valid, converts the parameters within the file to a", "'XML Config: Specified reverse reference file is not a fa/fas", "(adapter_flag in trim_adapters): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", ":param index_path: :return: \"\"\" ##TODO docstring atypical_path = os.path.join(index_path, '{}{}_{}.xml'.format(direction,", "= self.config_dict['demultiplex_flags']['@forward_adapter'] for charbase in forward_adapter: if charbase not in", "', Colour.end, 'XML Config: Atypical Realignment flag is not True/False.'))", "trigger: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', 
Colour.end, 'XML Config: Failure, exiting.')) sys.exit(2)", "## ## Config mode check if parsed_arguments.config: if not filesystem_exists_check(parsed_arguments.config[0]):", "is ' ' or character is '/': log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified Job", "Colour.end, 'XML Config: Parsing parameters successful!')) class DataClump(dict): \"\"\"Container object", "datasets: dictionary-like object that exposes its keys as attributes.\"\"\" def", "seq_match_score = self.config_dict['alignment_flags']['@seq_match_score'] if not seq_match_score.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "trigger = True minimum_overlap = self.config_dict['demultiplex_flags']['@min_overlap'] if not minimum_overlap.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "'' ccglen = allele_object.get_ccg() cctlen = allele_object.get_cct() tp_flank = 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT'", "= '\\033[96m' darkcyan = '\\033[36m' blue = '\\033[94m' green =", "True ## ## Trimming flag settings if sequence_qc_flag == 'True':", "in prime_clipping_penalty: if not individual_prclip.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "= np.empty((n_samples, n_features)) temp = next(data_file) feature_names = np.array(temp) labels", "'shd__ ', Colour.end, 'Specified config file is not an XML", "stage == 'gtype': pass def generate_atypical_xml(label, allele_object, index_path, direction): \"\"\"", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified seeded_chain_drop integer is", "loc = mutate_list.index(target_fqfile) mutate_list[loc] = altered_path return mutate_list def scrape_summary_data(stage,", "or trimming_type == 'Adapter' or trimming_type == 'Both'): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "children or t.attrib: if text: d[t.tag]['#text'] = text else: d[t.tag]", "integer(s) is(are) invalid.')) trigger=True gap_extend_penalty_raw = 
self.config_dict['alignment_flags']['@gap_extend_penalty'] gap_extend_penalty = gap_extend_penalty_raw.split(',')", "dd.items()}} ## ## Values for key if t.attrib: d[t.tag].update(('@' +", "type_func('picard') except NameError: trigger=True try: type_func('freebayes') except NameError: trigger=True return", "or atypical_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "Colour.end, 'XML Config: Specified forward reference file could not be", "except NameError: trigger=True try:type_func('cutadapt') except NameError: trigger=True if alignment ==", "'true', 't', '1') return boolean_value def empty_string_check(string, raise_exception=True): \"\"\" Simple", "guarantees that the settings dictionary is full of valid settings!", "return '*' def sanitise_alignment_output(input_object, input_list, stage): if type(input_object) is int:", "= True ## ## Demultiplexing flag settings trim_adapter_base = ['A',", "configuration file (just incase) if self.config_filename is None: log.error(\"No configuration", "the run directory for datetime if not os.path.exists(output_root): log.info('{}{}{}{}'.format(Colour.bold, 'shd__", "specified input directory.')) trigger = True forward_reference = self.config_dict['@forward_reference'] if", "= '\\033[92m' yellow = '\\033[93m' red = '\\033[91m' bold =", "== 3: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:1] return ''.join(cleanse_target) else: cleanse_target", "skip_seed_with_occurrence integer is invalid.')) trigger=True chain_drop = self.config_dict['alignment_flags']['@chain_drop'] if not", "path does not exist, and raise_exception is set to True,", "Get Anchor summary_start = 0 for i in range(0, len(trim_lines)):", "Config: Specified reverse reference file is not a fa/fas file.'))", "Config: SNP Quality Cutoff value is not an integer.')) trigger", "into objects in preparation for bunch() data_file_name = self.database with", "= True if not float(error_tolerance) 
in np.arange(0,1.1,0.01): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "tolerance is not a valid float.')) trigger = True if", "== 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Atypical Realignment", "len(sorted_input) if not file_count % 2 == 0: log.error('{}{}{}{}'.format(Colour.red, 'shd__", "'XML Config: Sequence Quality control flag is not set to", "## Loci Nodes fp_input = etree.Element('input', type='fiveprime', flank=fp_flank) cag_region =", "trigger = True ## ## Instance flag settings demultiplexing_flag =", "flag settings demultiplexing_flag = self.config_dict['instance_flags']['@demultiplex'] if not (demultiplexing_flag == 'True'", "## If the user specified root doesn't exist, make it", "self.config_dict['demultiplex_flags']['@error_rate'] if not error_rate.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "= '' while True: purge_choice = input('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end,", "Y/N: ')) if not (purge_choice.lower() == 'y') and not (purge_choice.lower()", "== 'trim': with open(input_report_file, 'r') as trpf: trim_lines = trpf.readlines()", "for extract_target in target_files: unzipd = subprocess.Popen(['gzip', '-q', '-f', '-d',", "dictionary will be used for variables within the pipeline. Recursion", "seq_qc_path = os.path.join(instance_rundir, sample_root, 'SeqQC') align_path = os.path.join(instance_rundir, sample_root, 'Align')", "from the tree into a python dictionary {key: value}. 
This", "= self.config_dict['alignment_flags']['@seed_length_extension'] if not isinstance(float(seed_length_extension), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "canvas class Colour: def __init__(self): pass purple = '\\033[95m' cyan", "return trigger def sanitise_outputs(jobname, output_argument): run_dir = '' output_root =", "import canvas class Colour: def __init__(self): pass purple = '\\033[95m'", "not min_seed_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified min_seed_length", "of valid settings! \"\"\" trigger = False ## ## Main", "= False ## ## Main configuration instance settings data_directory =", "== 'True': snp_observation_pcnt = self.config_dict['prediction_flags']['@snp_observation_threshold'] if not snp_observation_pcnt.isdigit(): if not", "unpaired_pairing_penalty integer is invalid.')) trigger=True ## ## Genotype prediction flag", "instance run directory.. ')) mkdir_p(run_dir) ## Inform user it's all", "'shd__ ', Colour.end, 'XML Config: Specified trimming adapter not valid", "try:type_func('samtools') except NameError: trigger=True try:type_func('generatr') except NameError: trigger=True if genotyping", "trigger = True forward_position = self.config_dict['demultiplex_flags']['@forward_position'] if forward_position not in", "1 else v for k, v in dd.items()}} ## ##", "= etree.Element('loci', label=allele_object.get_reflabel()); data_root.append(loci_root) ## ## Loci Nodes fp_input =", "align_path, predict_path] sequence_pairs.append(file_pair) return sequence_pairs def filesystem_exists_check(path, raise_exception=True): \"\"\" Checks", "string (boolean_value), returns a boolean value representing the string contents.", "to True/False.')) trigger = True sequence_qc_flag = self.config_dict['instance_flags']['@quality_control'] if not", "recycling code ## Calls UNIX type for checking binaries present", "with open(modeldescr_name) as f: descr_text = f.read() ## Loads 
data", "conformation of XML structure \"\"\" ## ## Open > etree.DTD", "return True def sanitise_inputs(parsed_arguments): \"\"\" Utilises filesystem_exists_check and check_input_files if", "= self.config_dict['demultiplex_flags']['@reverse_position'] if reverse_position not in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red,", "== 'Both'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Trimming type", "def generate_reference(input_xml, index_path, ref_indexes, direction): ##TODO docstring label = input_xml.split('/')[-1].split('.')[0]", "input file does not end in _R1. ', forward_data)) sys.exit(2)", "the now validated XML and extracts information from the tree", "csv.reader(f) temp = next(data_file) n_samples = int(temp[0]) n_features = int(temp[1])", "parsed_arguments.config: if not filesystem_exists_check(parsed_arguments.config[0]): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config", "else in pipeline sample_root = '_'.join(forward_data_name.split('_')[:-1]) instance_path = os.path.join(instance_rundir) seq_qc_path", "Config: Non FastQ/GZ data detected in specified input directory.')) trigger", "= instance_params.config_dict['instance_flags']['@snp_calling'] except AttributeError: quality_control = instance_params['quality_control'] alignment = instance_params['sequence_alignment']", "not (snpcall_flag == 'True' or snpcall_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "binaries present ## Changed from WHICH as apparently type functions", "exception should be raised if the string is empty. 
If", "float is invalid.')) trigger=True seeded_chain_drop = self.config_dict['alignment_flags']['@seeded_chain_drop'] if not seeded_chain_drop.isdigit():", "'shd__ ', Colour.end, 'XML Config: Specified indel_penalty integer(s) is(are) invalid.'))", "## Instance variables self.scriptdir = scriptdir self.config_filename = config_filename self.dtd_filename", "os.path.join(instance_rundir) seq_qc_path = os.path.join(instance_rundir, sample_root, 'SeqQC') align_path = os.path.join(instance_rundir, sample_root,", "Colour.end, 'XML Config: Specified quality threshold integer out of range", "= True ## ## Config mode check if parsed_arguments.config: if", "if target_fqfile in mutate_list: loc = mutate_list.index(target_fqfile) mutate_list[loc] = altered_path", "intervening = etree.Element('input', type='intervening', sequence=intv, prior='1') ccg_region = etree.Element('input', type='repeat_region',", "def recursive_generation(t): d = {t.tag: {} if t.attrib else None}", "not os.path.isfile(reverse_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified reverse", "run_dir = os.path.join(output_root, jobname) mkdir_p(run_dir) else: purge_choice = '' while", "Config: Trimming type is not Quality/Adapter/Both.')) trigger = True quality_threshold", "trigger = True if not (forward_reference.endswith('.fa') or forward_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "'XML Config: Specified quality threshold integer out of range (0-38).'))", "modeldescr_name = self.descriptor with open(modeldescr_name) as f: descr_text = f.read()", "DataClump(dict): \"\"\"Container object for datasets: dictionary-like object that exposes its", "log import numpy as np import csv from io import", "file if direction == 'fw': toutfi = open(temp_output, 'w') cat_process", "fails, close the object (memory) and raise an error if", "could not be found.')) trigger = True if not (reverse_reference.endswith('fa')", "not 
isinstance(float(seed_length_extension), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "trigger = False ## ## Main configuration instance settings data_directory", "has invalid characters: \"', character, '\"')) trigger = True ##", "Colour.end, 'XML Config: Specified band_width integer is invalid.')) trigger=True seed_length_extension", "'{}{}_{}.xml'.format(direction, label, allele_object.get_reflabel())) fp_flank = 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC' cagstart = ''; cagend", "input file does not end in _R2. ', reverse_data)) sys.exit(2)", "and atypical reference into one file if direction == 'fw':", "our own objects if stage == 'gtype': pass def generate_atypical_xml(label,", "raise NameError ## ## To determine which binaries to check", "True/False.')) trigger = True alignment_flag = self.config_dict['instance_flags']['@sequence_alignment'] if not (alignment_flag", "class Colour: def __init__(self): pass purple = '\\033[95m' cyan =", "prime_clipping_penalty: if not individual_prclip.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "file for respective data set modeldescr_name = self.descriptor with open(modeldescr_name)", "= '\\033[91m' bold = '\\033[1m' underline = '\\033[4m' end =", "Prefix: ', jobname)) run_dir = os.path.join(output_root, jobname) if os.path.exists(run_dir): shutil.rmtree(run_dir,", "for configuration file (just incase) if self.config_filename is None: log.error(\"No", "self.config_dict['instance_flags']['@genotype_prediction'] if not (genotype_flag == 'True' or genotype_flag == 'False'):", "== 1 else v for k, v in dd.items()}} ##", "adapter not valid selection.')) trigger = True forward_adapter = self.config_dict['trim_flags']['@forward_adapter']", "close summary_data = trim_lines[summary_start:summary_start + scraping_buffer] trpf.close() return summary_data[2:] ##", "is invalid.')) trigger=True band_width = 
self.config_dict['alignment_flags']['@band_width'] if not band_width.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "os.path.join(output_root, 'ScaleHDRun_'+today) log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating instance run directory..", "'rv': cagstart = '100'; cagend = '100' ccgstart = '1';", "quality_control == 'True': try:type_func('java') except NameError: trigger=True try:type_func('fastqc') except NameError:", "if children or t.attrib: if text: d[t.tag]['#text'] = text else:", "= config_filename self.dtd_filename = scriptdir + \"/config/config.dtd\" ## ## Check", "self.config_dict['alignment_flags']['@mismatch_penalty'] if not mismatch_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "based on date (for run ident) date = datetime.date.today().strftime('%d-%m-%Y') walltime", "data_root.append(loci_root) ## ## Loci Nodes fp_input = etree.Element('input', type='fiveprime', flank=fp_flank)", "len(v) == 1 else v for k, v in dd.items()}}", "or fqfile.endswith('.fq.gz') or fqfile.endswith('.fastq.gz')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "return false, path is invalid or unsupported files present so,", "Anchor summary_start = 0 for i in range(0, len(trim_lines)): if", "adapted from http://stackoverflow.com/a/9286702 \"\"\" def recursive_generation(t): d = {t.tag: {}", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Atypical Realignment flag is", "'\\033[95m' cyan = '\\033[96m' darkcyan = '\\033[36m' blue = '\\033[94m'", "', Colour.end, 'XML Config: Specified skip_seed_with_occurrence integer is invalid.')) trigger=True", "try:type_func('generatr') except NameError: trigger=True if genotyping == 'True': try:type_func('samtools') except", "empty. 
Parameter raise_exception determines if a ValueError exception should be", "Check for configuration file (just incase) if self.config_filename is None:", "shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) binary_result = binary_subprocess.communicate() binary_subprocess.wait() if 'not found'.encode()", "', Colour.end, 'Output directories OK!')) return run_dir def replace_fqfile(mutate_list, target_fqfile,", "subprocess.Popen(['cat', target_output, ref_indexes[0]], stdout=toutfi, stderr=subprocess.PIPE) cat_process.wait() toutfi.close() target_output = temp_output", "from http://stackoverflow.com/a/9286702 \"\"\" def recursive_generation(t): d = {t.tag: {} if", "Trimming flag settings if sequence_qc_flag == 'True': trimming_type = self.config_dict['trim_flags']['@trim_type']", "not int(quality_threshold) in range(0,39): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "', Colour.end, 'XML Config: Specified error tolerance is not 0.0", "quality_control = instance_params.config_dict['instance_flags']['@quality_control'] alignment = instance_params.config_dict['instance_flags']['@sequence_alignment'] genotyping = instance_params.config_dict['instance_flags']['@genotype_prediction'] snp_calling", "sequence=intv, prior='1') ccg_region = etree.Element('input', type='repeat_region', order='2', unit='CCG', start=ccgstart, end=ccgend)", "'I/O: Non-even number of input files specified. 
Cannot continue without", "Config: Invalid character detected in RV adapter sequence.')) trigger =", "== 'Quality' or trimming_type == 'Adapter' or trimming_type == 'Both'):", "unzipd.wait() return True def sequence_pairings(data_path, instance_rundir): ## ## Get input", "except NameError: trigger=True try:type_func('generatr') except NameError: trigger=True if genotyping ==", "'XML Config: Specified chain_drop float is invalid.')) trigger=True seeded_chain_drop =", "cagstart = ''; cagend = '' intv = allele_object.get_intervening() ccgstart", "self.config_dict = self.config_dict[list(self.config_dict.keys())[0]] def validate_config(self): \"\"\" Method which validates the", "string_repr = etree.tostring(self.config_file, pretty_print=True) element_tree = cElementTree.XML(string_repr) self.config_dict = recursive_generation(element_tree)", "= input_list[input_object].lstrip().rstrip().split(' ')[0:2] return ' '.join(cleanse_target) else: return '*' def", "', Colour.end, 'XML Config: Specified quality threshold integer is invalid.'))", "raise_exception determines if a ValueError exception should be raised if", "is int: if stage == 3: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:1]", "2): file_pair = {} forward_data = sorted_input[i] reverse_data = sorted_input[i+1]", "dictionary object, reader to be viewed through accessing the config_dict", "align_lines = alnrpf.readlines() alnrpf.close() ## ## No ranges required, only", "self.config_filename = config_filename self.dtd_filename = scriptdir + \"/config/config.dtd\" ## ##", "not minimum_length == '': if not minimum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "parsed_arguments.jobname: if character is ' ' or character is '/':", "', reverse_data)) sys.exit(2) ## ## Make Stage outputs for use", "not (forward_reference.endswith('.fa') or forward_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "sequence.')) 
trigger = True error_tolerance = self.config_dict['trim_flags']['@error_tolerance'] if not isinstance(float(error_tolerance),", "alignment_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Sequence", "'shd__ ', Colour.end, 'XML Config: Specified error_rate is not a", "type='intervening', sequence=intv, prior='1') ccg_region = etree.Element('input', type='repeat_region', order='2', unit='CCG', start=ccgstart,", "flag.')) trigger = True reverse_position = self.config_dict['demultiplex_flags']['@reverse_position'] if reverse_position not", "is '/': log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified Job Name has invalid characters: \"',", "Given demultiplexing reverse adapter position invalid! [5P, 3P, AP]')) trigger", "'shd__ ', Colour.end, 'XML Config: Specified max_length is not a", "except AttributeError for -b style try: quality_control = instance_params.config_dict['instance_flags']['@quality_control'] alignment", "OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(path): pass", "seq_match_score integer is invalid.')) trigger=True mismatch_penalty = self.config_dict['alignment_flags']['@mismatch_penalty'] if not", "self.config_dict['trim_flags']['@trim_type'] if not (trimming_type == 'Quality' or trimming_type == 'Adapter'", "N.')) continue else: break if purge_choice.lower() == 'y': log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__", "= True reverse_adapter = self.config_dict['demultiplex_flags']['@reverse_adapter'] for charbase in reverse_adapter: if", "Colour.end, 'Specified config file is not an XML file.')) trigger", "invalid.')) trigger=True mismatch_penalty = self.config_dict['alignment_flags']['@mismatch_penalty'] if not mismatch_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "= '<EMAIL>' ## ## Imports import string import os import", "= etree.Element('input', type='fiveprime', flank=fp_flank) cag_region = etree.Element('input', type='repeat_region', 
order='1', unit='CAG',", "style try: quality_control = instance_params.config_dict['instance_flags']['@quality_control'] alignment = instance_params.config_dict['instance_flags']['@sequence_alignment'] genotyping =", "need to tidy up report for genotyping ## since we", "self.config_dict['@reverse_reference'] if not os.path.isfile(reverse_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "skip_seed_with_occurrence = self.config_dict['alignment_flags']['@skip_seed_with_occurrence'] if not skip_seed_with_occurrence.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "OK!')) return run_dir def replace_fqfile(mutate_list, target_fqfile, altered_path): if target_fqfile in", "'' while True: purge_choice = input('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Job", "raise Exception('User chose not to delete pre-existing Job folder. Cannot", "', Colour.end, 'XML Config: Parsing parameters successful!')) class DataClump(dict): \"\"\"Container", "a valid integer.')) trigger = True maximum_length = self.config_dict['demultiplex_flags']['@max_length'] if", "= self.config_dict['trim_flags']['@forward_adapter'] for charbase in forward_adapter: if charbase not in", "label + '.fa') temp_output = os.path.join(index_path, label + '_concat.fa') gen_process", "'-o', target_output], stdout=subprocess.PIPE, stderr=subprocess.PIPE) gen_process.wait() ## ## Join typical and", "trigger=True try: type_func('freebayes') except NameError: trigger=True return trigger def sanitise_outputs(jobname,", "raise_exception is False and the string is empty, True is", "not (sequence_qc_flag == 'True' or sequence_qc_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "Config: Specified seq_match_score integer is invalid.')) trigger=True mismatch_penalty = self.config_dict['alignment_flags']['@mismatch_penalty']", "unzipd = subprocess.Popen(['gzip', '-q', '-f', '-d', extract_target], stderr=subprocess.PIPE) unzipd.wait() 
return", "parse info to dictionary, validate vs ruleset self.validate_against_dtd() self.set_dictionary() self.validate_config()", "a fa/fas file.')) trigger = True if forward_reference.split('/')[-1] == reverse_reference.split('/')[-1]:", "', Colour.end, 'XML Config: Specified chain_drop float is invalid.')) trigger=True", "True snpcall_flag = self.config_dict['instance_flags']['@snp_calling'] if not (snpcall_flag == 'True' or", "'Quality' or trimming_type == 'Adapter' or trimming_type == 'Both'): log.error('{}{}{}{}'.format(Colour.red,", "'_'.join(forward_data_name.split('_')[:-1]) instance_path = os.path.join(instance_rundir) seq_qc_path = os.path.join(instance_rundir, sample_root, 'SeqQC') align_path", "user specified root doesn't exist, make it ## Then make", "= True quality_threshold = self.config_dict['trim_flags']['@quality_threshold'] if not quality_threshold.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "or t.attrib: if text: d[t.tag]['#text'] = text else: d[t.tag] =", "is not set to True/False.')) trigger = True sequence_qc_flag =", "string is empty. False indicates the string is NOT empty.", "trigger def sanitise_outputs(jobname, output_argument): run_dir = '' output_root = output_argument[0]", "own objects if stage == 'gtype': pass def generate_atypical_xml(label, allele_object,", "FastQ/GZ data detected in specified input directory.')) trigger = True", "all gonna be okaaaayyyy log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'Output directories", "the settings dictionary is full of valid settings! \"\"\" trigger", "forward_adapter: if charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "not individual_gaextend.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified gap_extend_penalty", "raised - else False is returned. 
\"\"\" if os.path.lexists(path): return", "glob.glob(os.path.join(input_data_directory, '*')) for extract_target in target_files: if extract_target.lower().endswith(('.fq.gz', '.fastq.gz')): log.info('{}{}{}{}'.format(Colour.bold,", "align_lines[1:] ## ## No need to tidy up report for", "self.config_dict['trim_flags']['@error_tolerance'] if not isinstance(float(error_tolerance), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "atypical reference into one file if direction == 'fw': toutfi", "not float(error_tolerance) in np.arange(0,1.1,0.01): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "directory.')) trigger = True forward_reference = self.config_dict['@forward_reference'] if not os.path.isfile(forward_reference):", "= self.config_dict['prediction_flags']['@snp_observation_threshold'] if not snp_observation_pcnt.isdigit(): if not int(snp_observation_pcnt) in range(1,5):", "try: type_func('freebayes') except NameError: trigger=True return trigger def sanitise_outputs(jobname, output_argument):", "')[0:1] return ''.join(cleanse_target) else: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:2] return '", "'shd__ ', Colour.end, 'XML Config: Parsing parameters successful!')) class DataClump(dict):", "', Colour.end, 'XML Config: SNP Quality Cutoff value is not", "'\\033[4m' end = '\\033[0m' class ConfigReader(object): \"\"\" The configuration file", "This dictionary will be used for variables within the pipeline.", "error tolerance is not 0.0 < x < 1.0.')) trigger", "if character is ' ' or character is '/': log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__", "(memory) and raise an error if not dtd_object.validate(self.config_file): dtd_file.close() log.error(\"DTD", "for i in range(0, len(sorted_input), 2): file_pair = {} forward_data", "Output with prefix: ', jobname)) run_dir = os.path.join(output_root, jobname) mkdir_p(run_dir)", "as xmlfi: xmlfi.write(s.decode()) xmlfi.close() 
return atypical_path def generate_reference(input_xml, index_path, ref_indexes,", "else: return '*' def mkdir_p(path): try: os.makedirs(path) except OSError as", "= self.config_dict['demultiplex_flags']['@error_rate'] if not error_rate.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "'align': with open(input_report_file, 'r') as alnrpf: align_lines = alnrpf.readlines() alnrpf.close()", "dictionary {key: value}. This dictionary will be used for variables", "with prefix: ', jobname)) run_dir = os.path.join(output_root, jobname) mkdir_p(run_dir) else:", "a valid integer.')) trigger = True minimum_overlap = self.config_dict['demultiplex_flags']['@min_overlap'] if", "is False and the string is empty, True is returned.", "error_rate = self.config_dict['demultiplex_flags']['@error_rate'] if not error_rate.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "')) if not (purge_choice.lower() == 'y') and not (purge_choice.lower() ==", "gap_extend_penalty_raw.split(',') for individual_gaextend in gap_extend_penalty: if not individual_gaextend.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "variables self.scriptdir = scriptdir self.config_filename = config_filename self.dtd_filename = scriptdir", "return cleanse_target else: return '*' def sanitise_alignment_output(input_object, input_list, stage): if", "reader to be viewed through accessing the config_dict variable. 
\"\"\"", "= recursive_generation(element_tree) self.config_dict = self.config_dict[list(self.config_dict.keys())[0]] def validate_config(self): \"\"\" Method which", "forward_position not in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Trimming type is not", "None} children = list(t) ## ## If list was populated,", "ccgstart = '1'; ccgend = '20' ## ## Create XML", "dtd_file.close() log.error(\"DTD validation failure {0}: {1}\".format(self.config_filename, dtd_object.error_log.filter_from_errors()[0])) sys.exit(2) dtd_file.close() def", "## ## Takes the formatted xml doc, puts through generator,", "validates the configuration file's contents. If all pass, guarantees that", "'XML Config: Parsing parameters successful!')) class DataClump(dict): \"\"\"Container object for", "for use in everywhere else in pipeline sample_root = '_'.join(forward_data_name.split('_')[:-1])", "be either a directory or file. 
If the path exists,", "self.config_dict['demultiplex_flags']['@forward_position'] if forward_position not in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__", "'XML Config: Specified error tolerance is not a valid float.'))", "'shd__ ', Colour.end, 'XML Config: Specified chain_drop float is invalid.'))", "def initialise_libraries(instance_params): trigger = False ## ## Subfunction for recycling", "array scraping_buffer = 8 if '-q' in trim_lines[1]: scraping_buffer +=", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified seed_length_extension float is", "sequence_qc_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Sequence", "binary_subprocess = subprocess.Popen([binary_string], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) binary_result = binary_subprocess.communicate() binary_subprocess.wait()", "integer is invalid.')) trigger=True ## ## Genotype prediction flag settings", "(trimming_type == 'Quality' or trimming_type == 'Adapter' or trimming_type ==", "objects in preparation for bunch() data_file_name = self.database with open(data_file_name)", "datetime.datetime.now().strftime('%H%M%S') today = date + '-' + walltime ## If", "', forward_data)) sys.exit(2) ## ## Check reverse ends with R2", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified skip_seed_with_occurrence integer is", "by parameter string is empty. 
False indicates the string is", "or demultiplexing_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "self.config_dict['demultiplex_flags']['@min_length'] if not minimum_length == '': if not minimum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "'shd__ ', Colour.end, 'XML Config: Specified seq_match_score integer is invalid.'))", "(demultiplexing_flag == 'True' or demultiplexing_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "in gap_extend_penalty: if not individual_gaextend.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "range(1,5): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP Observation value", "try:type_func('samtools') except NameError: trigger=True try:type_func('generatr') except NameError: trigger=True if snp_calling", "0.0 < x < 1.0.')) trigger = True ## ##", "database, descriptor): self.database = database self.descriptor = descriptor def load_model(self):", "line return align_lines[1:] ## ## No need to tidy up", "= self.config_dict['@reverse_reference'] if not os.path.isfile(reverse_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "', Colour.end, 'Detected compressed input data. 
Extracting!')) break for extract_target", "to True/False.')) trigger = True atypical_flag = self.config_dict['instance_flags']['@atypical_realignment'] if not", "if not (genotype_flag == 'True' or genotype_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red,", "import subprocess import logging as log import numpy as np", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified error_rate is not", "= etree.Element('data') loci_root = etree.Element('loci', label=allele_object.get_reflabel()); data_root.append(loci_root) ## ## Loci", "True ## ## Alignment flag settings if alignment_flag == 'True':", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP Observation value invalid!", "+ '.fa') temp_output = os.path.join(index_path, label + '_concat.fa') gen_process =", "not be found.')) trigger = True if not (reverse_reference.endswith('fa') or", "not set to True/False.')) trigger = True sequence_qc_flag = self.config_dict['instance_flags']['@quality_control']", "= True reverse_reference = self.config_dict['@reverse_reference'] if not os.path.isfile(reverse_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "Colour.end, 'XML Config: Specified forward reference file is not a", "= sorted(input_files) sequence_pairs = [] file_count = len(sorted_input) if not", "in target_files: if extract_target.lower().endswith(('.fq.gz', '.fastq.gz')): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Detected", "== 'True': try:type_func('samtools') except NameError: trigger=True try:type_func('generatr') except NameError: trigger=True", "Specified reverse reference file could not be found.')) trigger =", "preparation for bunch() data_file_name = self.database with open(data_file_name) as f:", "os.path.isfile(reverse_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified reverse reference", "prime_clipping_penalty_raw = 
self.config_dict['alignment_flags']['@prime_clipping_penalty'] prime_clipping_penalty = prime_clipping_penalty_raw.split(',') for individual_prclip in prime_clipping_penalty:", "Config: FW and RV references have identical filenames. Will create", "trim_lines = trpf.readlines() ## ## Determine buffer size to slice", "user it's all gonna be okaaaayyyy log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end,", "in the situation where instance_params origin differs ## try for", "jobname) if not os.path.exists(target_output): log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating Output", "import etree from reportlab.pdfgen import canvas class Colour: def __init__(self):", "forward_adapter demultiplexing flag.')) trigger = True forward_position = self.config_dict['demultiplex_flags']['@forward_position'] if", "file (just incase) if self.config_filename is None: log.error(\"No configuration file", "example, a string with 'true', 't', 'y' or 'yes' will", "= '100'; cagend = '100' ccgstart = '1'; ccgend =", "Config: Sequence Alignment flag is not set to True/False.')) trigger", "= text else: d[t.tag] = text return d ## ##", "os.path.exists(target_output): log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating Output with prefix: ',", "from our own objects if stage == 'gtype': pass def", "Config: Specified forward reference file could not be found.')) trigger", "['-a','-g','-a$','-g^','-b'] adapter_flag = self.config_dict['trim_flags']['@adapter_flag'] if not (adapter_flag in trim_adapters): log.error('{}{}{}{}'.format(Colour.red,", "prime_clipping_penalty integer(s) is(are) invalid.')) trigger=True unpaired_pairing_penalty = self.config_dict['alignment_flags']['@unpaired_pairing_penalty'] if not", "if not dtd_object.validate(self.config_file): dtd_file.close() log.error(\"DTD validation failure {0}: {1}\".format(self.config_filename, dtd_object.error_log.filter_from_errors()[0]))", "indel_penalty_raw = 
self.config_dict['alignment_flags']['@indel_penalty'] indel_penalty = indel_penalty_raw.split(',') for individual_indelpen in indel_penalty:", "'XML Config: Specified trimming adapter not valid selection.')) trigger =", "returned. If the path does not exist, and raise_exception is", "snpcall_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP", "If the path exists, True is returned. If the path", "an integer.')) trigger = True if trigger: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "type(input_object) is int: cleanse_target = input_list[input_object].split(':')[1].lstrip().rstrip() return cleanse_target else: return", "if stage == 'align': with open(input_report_file, 'r') as alnrpf: align_lines", "DataClump(DATA=data, TARGET=hash_int_labels, FTRNAME=feature_names[:-1], DESCR=descr_text, ENCDR=le) def parse_boolean(boolean_value): \"\"\" Given a", "\"/config/config.dtd\" ## ## Check for configuration file (just incase) if", "value representing the string contents. For example, a string with", "= defaultdict(list) for dc in map(recursive_generation, children): for k, v", "sequence_pairings(data_path, instance_rundir): ## ## Get input files from data path", "trigger=True if snp_calling == 'True': try: type_func('picard') except NameError: trigger=True", "os.path.exists(output_root): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating output root... ')) mkdir_p(output_root)", "scraping_buffer += 1 ## ## Get Anchor summary_start = 0", "= glob.glob(os.path.join(input_data_directory, '*')) for extract_target in target_files: if extract_target.lower().endswith(('.fq.gz', '.fastq.gz')):", "open(temp_output, 'w') cat_process = subprocess.Popen(['cat', target_output, ref_indexes[0]], stdout=toutfi, stderr=subprocess.PIPE) cat_process.wait()", "Config: Invalid character detected in FW adapter sequence.')) trigger =", "is returned. 
\"\"\" if string != '': return False if", "True minimum_overlap = self.config_dict['demultiplex_flags']['@min_overlap'] if not minimum_overlap.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "the user specified root doesn't exist, make it ## Then", "Delete existing folder? Y/N: ')) if not (purge_choice.lower() == 'y')", "docstring label = input_xml.split('/')[-1].split('.')[0] target_output = os.path.join(index_path, label + '.fa')", "is invalid or unsupported files present so, quit \"\"\" trigger", "{} forward_data = sorted_input[i] reverse_data = sorted_input[i+1] ## ## Check", "found.')) trigger = True if not (reverse_reference.endswith('fa') or reverse_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red,", "or 'yes' will yield True. \"\"\" boolean_value = string.lower(boolean_value) in", "Parsing parameters successful!')) class DataClump(dict): \"\"\"Container object for datasets: dictionary-like", "out of range (0-38).')) trigger = True trim_adapters = ['-a','-g','-a$','-g^','-b']", "Colour.end, 'XML Config: Specified seed_length_extension float is invalid.')) trigger=True skip_seed_with_occurrence", "t.attrib else None} children = list(t) ## ## If list", "instance_params origin differs ## try for -c style, except AttributeError", "character detected in RV adapter sequence.')) trigger = True error_tolerance", "integer is invalid.')) trigger=True seed_length_extension = self.config_dict['alignment_flags']['@seed_length_extension'] if not isinstance(float(seed_length_extension),", "open(input_report_file, 'r') as trpf: trim_lines = trpf.readlines() ## ## Determine", "etree.Element('input', type='fiveprime', flank=fp_flank) cag_region = etree.Element('input', type='repeat_region', order='1', unit='CAG', start=cagstart,", "underline = '\\033[4m' end = '\\033[0m' class ConfigReader(object): \"\"\" The", "for i, d in enumerate(data_file): data[i] = d[:-1] label =", "ident) date = datetime.date.today().strftime('%d-%m-%Y') walltime = 
datetime.datetime.now().strftime('%H%M%S') today = date", "as log import numpy as np import csv from io", "instance_path = os.path.join(instance_rundir) seq_qc_path = os.path.join(instance_rundir, sample_root, 'SeqQC') align_path =", "'XML Config: Specified seed_length_extension float is invalid.')) trigger=True skip_seed_with_occurrence =", "'-q', '-f', '-d', extract_target], stderr=subprocess.PIPE) unzipd.wait() return True def sequence_pairings(data_path,", "alignment.. if stage == 'align': with open(input_report_file, 'r') as alnrpf:", "and raise an error if not dtd_object.validate(self.config_file): dtd_file.close() log.error(\"DTD validation", "indel_penalty_raw.split(',') for individual_indelpen in indel_penalty: if not individual_indelpen.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "not check_input_files('.xml',xmlfile): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file is", "tp_flank = 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT' if direction == 'fw': cagstart = '1';", "Specified mismatch_penalty integer is invalid.')) trigger=True indel_penalty_raw = self.config_dict['alignment_flags']['@indel_penalty'] indel_penalty", "flag is not set to True/False.')) trigger = True alignment_flag", "for individual_prclip in prime_clipping_penalty: if not individual_prclip.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "type='repeat_region', order='1', unit='CAG', start=cagstart, end=cagend) intervening = etree.Element('input', type='intervening', sequence=intv,", "= next(data_file) n_samples = int(temp[0]) n_features = int(temp[1]) data =", "the string is NOT empty. Parameter raise_exception determines if a", "SNP Observation value invalid! 
Please use 1-10.')) trigger = True", "report for genotyping ## since we already have the data", "not valid selection.')) trigger = True forward_adapter = self.config_dict['trim_flags']['@forward_adapter'] for", "'*' def mkdir_p(path): try: os.makedirs(path) except OSError as exc: if", "dd[k].append(v) d = {t.tag: {k: v[0] if len(v) == 1", "filesystem_exists_check(parsed_arguments.config[0]): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file could not", "return run_dir def replace_fqfile(mutate_list, target_fqfile, altered_path): if target_fqfile in mutate_list:", "int(temp[1]) data = np.empty((n_samples, n_features)) temp = next(data_file) feature_names =", "is not True/False.')) trigger = True ## ## Demultiplexing flag", "seeded_chain_drop integer is invalid.')) trigger=True seq_match_score = self.config_dict['alignment_flags']['@seq_match_score'] if not", "in t.attrib.items()) if t.text: text = t.text.strip() if children or", "Config: Given demultiplexing forward adapter position invalid! 
[5P, 3P, AP]'))", "= instance_params['snp_calling'] if quality_control == 'True': try:type_func('java') except NameError: trigger=True", "'XML Config: Specified min_length is not a valid integer.')) trigger", "'XML Config: Specified quality threshold integer is invalid.')) trigger =", "on linux input_files = glob.glob(os.path.join(data_path, '*')) sorted_input = sorted(input_files) sequence_pairs", "input_xml, '-o', target_output], stdout=subprocess.PIPE, stderr=subprocess.PIPE) gen_process.wait() ## ## Join typical", "else: break if purge_choice.lower() == 'y': log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end,", "trigger=True try:type_func('cutadapt') except NameError: trigger=True if alignment == 'True': try:type_func('seqtk')", "= scriptdir + \"/config/config.dtd\" ## ## Check for configuration file", "return True return False def initialise_libraries(instance_params): trigger = False ##", "self.config_dict['@data_dir'] if not os.path.exists(data_directory): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "Colour.end, 'XML Config: Specified data directory could not be found.'))", "= input('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Job folder already exists. Delete", "= instance_params['quality_control'] alignment = instance_params['sequence_alignment'] genotyping = instance_params['genotype_prediction'] snp_calling =", "Specified seeded_chain_drop integer is invalid.')) trigger=True seq_match_score = self.config_dict['alignment_flags']['@seq_match_score'] if", "os.path.lexists(path): return True if raise_exception: log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified input path could", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: FW and RV references", "\"\"\" Validate input config against DTD ruleset i.e. 
confirms conformation", "return atypical_path def generate_reference(input_xml, index_path, ref_indexes, direction): ##TODO docstring label", "type for checking binaries present ## Changed from WHICH as", "Non FastQ/GZ data detected in specified input directory.')) trigger =", "if not unpaired_pairing_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "that the settings dictionary is full of valid settings! \"\"\"", "datetime if not os.path.exists(output_root): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating output", "Opens a configuration file, and if valid, converts the parameters", "into a python dictionary {key: value}. This dictionary will be", "', Colour.end, 'XML Config: SNP Calling flag is not True/False.'))", "'== Summary ==' in trim_lines[i]: summary_start = i ## ##", "exists. Can be either a directory or file. If the", "integer.')) trigger = True maximum_length = self.config_dict['demultiplex_flags']['@max_length'] if not maximum_length", "True genotype_flag = self.config_dict['instance_flags']['@genotype_prediction'] if not (genotype_flag == 'True' or", "def empty_string_check(string, raise_exception=True): \"\"\" Simple check to see if the", "## Check forward ends with R1 forward_data_name = sorted_input[i].split('/')[-1].split('.')[0] if", "config_dict variable. \"\"\" def __init__(self, scriptdir, config_filename=None): ## ## Instance", "\"\"\" :param allele_object: :param index_path: :return: \"\"\" ##TODO docstring atypical_path", "If validation fails, close the object (memory) and raise an", "ValueError(\"Empty string detected!\") return True def sanitise_inputs(parsed_arguments): \"\"\" Utilises filesystem_exists_check", "= False ## ## Jobname prefix validity check if parsed_arguments.jobname:", "reference file could not be found.')) trigger = True if", "DTD ruleset i.e. 
confirms conformation of XML structure \"\"\" ##", "'shd__ ', Colour.end, 'XML Config: Failure, exiting.')) sys.exit(2) else: log.info('{}{}{}{}'.format(Colour.green,", "Main configuration instance settings data_directory = self.config_dict['@data_dir'] if not os.path.exists(data_directory):", "is from trimming.. if stage == 'trim': with open(input_report_file, 'r')", "Will create indexing issue.')) trigger = True ## ## Instance", "minimum_overlap.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified min_overlap is", "= {t.tag: {} if t.attrib else None} children = list(t)", "Colour.end, 'Detected compressed input data. Extracting!')) break for extract_target in", "1-10.')) trigger = True quality_cutoff = self.config_dict['prediction_flags']['@quality_cutoff'] if not quality_cutoff.isdigit():", "'shd__ ', Colour.end, 'XML Config: Specified skip_seed_with_occurrence integer is invalid.'))", "True if not (reverse_reference.endswith('fa') or reverse_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "set_dictionary(self): \"\"\" Takes the now validated XML and extracts information", "keys if children: dd = defaultdict(list) for dc in map(recursive_generation,", "Colour.end, 'XML Config: FW and RV references have identical filenames.", "= '' intv = allele_object.get_intervening() ccgstart = ''; ccgend =", "int(temp[0]) n_features = int(temp[1]) data = np.empty((n_samples, n_features)) temp =", "8 if '-q' in trim_lines[1]: scraping_buffer += 1 ## ##", "def __init__(self, scriptdir, config_filename=None): ## ## Instance variables self.scriptdir =", "The configuration file reader. 
Opens a configuration file, and if", "raise_exception is set to True, an IOError is raised -", "children = list(t) ## ## If list was populated, create", "gen_process = subprocess.Popen(['generatr', '-i', input_xml, '-o', target_output], stdout=subprocess.PIPE, stderr=subprocess.PIPE) gen_process.wait()", "= int(temp[0]) n_features = int(temp[1]) data = np.empty((n_samples, n_features)) temp", "empty. If raise_exception is False and the string is empty,", "'Specified config file could not be found.')) trigger = True", "type_func(binary): binary_result = [] binary_string = 'type {}'.format(binary) binary_subprocess =", "set to True/False.')) trigger = True alignment_flag = self.config_dict['instance_flags']['@sequence_alignment'] if", "or genotype_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "sys.exit(2) ## ## Optimise so code isn't recycled for i", "'r') dtd_object = etree.DTD(dtd_file) ## ## If validation fails, close", "To determine which binaries to check for ## AttributeError in", "code isn't recycled for i in range(0, len(sorted_input), 2): file_pair", "validate_config(self): \"\"\" Method which validates the configuration file's contents. If", "else: return '*' def sanitise_alignment_output(input_object, input_list, stage): if type(input_object) is", "== 'fw': cagstart = '1'; cagend = '200' ccgstart =", "dc in map(recursive_generation, children): for k, v in dc.items(): dd[k].append(v)", "maximum_length = self.config_dict['demultiplex_flags']['@max_length'] if not maximum_length == '': if not", "'100' ccgstart = '1'; ccgend = '20' ## ## Create", "preprocessing.LabelEncoder() le.fit(labels) hash_int_labels = le.transform(labels) return DataClump(DATA=data, TARGET=hash_int_labels, FTRNAME=feature_names[:-1], DESCR=descr_text,", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Invalid character detected in", "should be raised if the string is empty. 
If raise_exception", "= True for xmlfile in parsed_arguments.config: if not check_input_files('.xml',xmlfile): log.error('{}{}{}{}'.format(Colour.red,", "with open(input_report_file, 'r') as alnrpf: align_lines = alnrpf.readlines() alnrpf.close() ##", "is(are) invalid.')) trigger=True gap_extend_penalty_raw = self.config_dict['alignment_flags']['@gap_extend_penalty'] gap_extend_penalty = gap_extend_penalty_raw.split(',') for", "= descriptor def load_model(self): ## Loads description file for respective", "if os.path.exists(run_dir): shutil.rmtree(run_dir, ignore_errors=True) mkdir_p(run_dir) else: raise Exception('User chose not", "prime_clipping_penalty = prime_clipping_penalty_raw.split(',') for individual_prclip in prime_clipping_penalty: if not individual_prclip.isdigit():", "target_output def seek_target(input_list, target): for i in range(0, len(input_list)): if", "## Alignment flag settings if alignment_flag == 'True': min_seed_length =", "to True/False.')) trigger = True alignment_flag = self.config_dict['instance_flags']['@sequence_alignment'] if not", "Colour.end, 'XML Config: Specified error_rate is not a valid integer.'))", "Colour.end, 'XML Config: Invalid character detected in reverse_adapter demultiplexing flag.'))", "'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Atypical Realignment flag", "'shd__ ', Colour.end, 'XML Config: Specified reverse reference file could", "trimming adapter not valid selection.')) trigger = True forward_adapter =", "range(0,39): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified quality threshold", "= 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC' cagstart = ''; cagend = '' intv =", "Genotype Prediction control flag is not True/False.')) trigger = True", "dtd_object = etree.DTD(dtd_file) ## ## If validation fails, close the", "= '' output_root = output_argument[0] if jobname: target_output = os.path.join(output_root,", "valid integer.')) trigger = 
True minimum_overlap = self.config_dict['demultiplex_flags']['@min_overlap'] if not", "trigger = True atypical_flag = self.config_dict['instance_flags']['@atypical_realignment'] if not (atypical_flag ==", "Specified forward reference file is not a fa/fas file.')) trigger", "ENCDR=le) def parse_boolean(boolean_value): \"\"\" Given a string (boolean_value), returns a", "recycled for i in range(0, len(sorted_input), 2): file_pair = {}", "= [] file_count = len(sorted_input) if not file_count % 2", "+ walltime ## If the user specified root doesn't exist,", "target): for i in range(0, len(input_list)): if target in input_list[i]:", "is set to True, an IOError is raised - else", "scriptdir, config_filename=None): ## ## Instance variables self.scriptdir = scriptdir self.config_filename", "d = {t.tag: {} if t.attrib else None} children =", "create dictionary, Append keys if children: dd = defaultdict(list) for", "text = t.text.strip() if children or t.attrib: if text: d[t.tag]['#text']", "= self.config_dict['@data_dir'] if not os.path.exists(data_directory): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "linux input_files = glob.glob(os.path.join(data_path, '*')) sorted_input = sorted(input_files) sequence_pairs =", "jobname)) run_dir = os.path.join(output_root, jobname) if os.path.exists(run_dir): shutil.rmtree(run_dir, ignore_errors=True) mkdir_p(run_dir)", "Takes the now validated XML and extracts information from the", "## ## Create XML data_root = etree.Element('data') loci_root = etree.Element('loci',", "= True for fqfile in glob.glob(os.path.join(data_directory, '*')): if not (fqfile.endswith('.fq')", "a boolean value representing the string contents. 
For example, a", "Config: Invalid character detected in reverse_adapter demultiplexing flag.')) trigger =", "in [fp_input, cag_region, intervening, ccg_region, cct_region, tp_input]: loci_root.append(node) s =", "True, an IOError is raised - else False is returned.", "import sys import glob import datetime import subprocess import logging", "start=ccgstart, end=ccgend) cct_region = etree.Element('input', type='repeat_region', order='3', unit='CCT', start=str(cctlen), end=str(cctlen))", "True elif not int(quality_threshold) in range(0,39): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "exists. Delete existing folder? Y/N: ')) if not (purge_choice.lower() ==", "'ScaleHDRun_'+today) log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating instance run directory.. '))", "instance_path, seq_qc_path, align_path, predict_path] sequence_pairs.append(file_pair) return sequence_pairs def filesystem_exists_check(path, raise_exception=True):", "if t.attrib: d[t.tag].update(('@' + k, v) for k, v in", "Method which validates the configuration file's contents. If all pass,", "Quality control flag is not set to True/False.')) trigger =", "trim_adapters = ['-a','-g','-a$','-g^','-b'] adapter_flag = self.config_dict['trim_flags']['@adapter_flag'] if not (adapter_flag in", "instance settings data_directory = self.config_dict['@data_dir'] if not os.path.exists(data_directory): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "does not exist, and raise_exception is set to True, an", "except NameError: trigger=True if snp_calling == 'True': try: type_func('picard') except", "in _R2. 
', reverse_data)) sys.exit(2) ## ## Make Stage outputs", "if not filesystem_exists_check(parsed_arguments.config[0]): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file", "(reverse_reference.endswith('fa') or reverse_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "Ensures root output is a real directory ## Generates folder", "detected!\") return True def sanitise_inputs(parsed_arguments): \"\"\" Utilises filesystem_exists_check and check_input_files", "flag is not set to True/False.')) trigger = True atypical_flag", "already have the data from our own objects if stage", "trim_adapter_base = ['A', 'G', 'C', 'T'] if demultiplexing_flag == 'True':", "## Open > etree.DTD object dtd_file = open(self.dtd_filename, 'r') dtd_object", "Append keys if children: dd = defaultdict(list) for dc in", "forward adapter position invalid! [5P, 3P, AP]')) trigger = True", "for k, v in dc.items(): dd[k].append(v) d = {t.tag: {k:", "i in range(0, len(sorted_input), 2): file_pair = {} forward_data =", "through generator, returns dictionary string_repr = etree.tostring(self.config_file, pretty_print=True) element_tree =", "binary_result = binary_subprocess.communicate() binary_subprocess.wait() if 'not found'.encode() in binary_result[0] or", "stage == 'trim': with open(input_report_file, 'r') as trpf: trim_lines =", "import defaultdict from xml.etree import cElementTree from lxml import etree", "forward_data)) sys.exit(2) ## ## Check reverse ends with R2 reverse_data_name", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified forward reference file", "'Invalid input. Please input Y or N.')) continue else: break", "directory.. 
')) mkdir_p(run_dir) ## Inform user it's all gonna be", "'_concat.fa') gen_process = subprocess.Popen(['generatr', '-i', input_xml, '-o', target_output], stdout=subprocess.PIPE, stderr=subprocess.PIPE)", "summary_start = i ## ## Slice and close summary_data =", "instance_params.config_dict['instance_flags']['@sequence_alignment'] genotyping = instance_params.config_dict['instance_flags']['@genotype_prediction'] snp_calling = instance_params.config_dict['instance_flags']['@snp_calling'] except AttributeError: quality_control", "[5P, 3P, AP]')) trigger = True reverse_adapter = self.config_dict['demultiplex_flags']['@reverse_adapter'] for", "descriptor): self.database = database self.descriptor = descriptor def load_model(self): ##", "'XML Config: Specified forward reference file could not be found.'))", "= '\\033[4m' end = '\\033[0m' class ConfigReader(object): \"\"\" The configuration", "fqfile.endswith('.fastq.gz')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Non FastQ/GZ data", "mode check if parsed_arguments.config: if not filesystem_exists_check(parsed_arguments.config[0]): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "genotype_flag = self.config_dict['instance_flags']['@genotype_prediction'] if not (genotype_flag == 'True' or genotype_flag", "os.path.join(index_path, label + '_concat.fa') gen_process = subprocess.Popen(['generatr', '-i', input_xml, '-o',", "is full of valid settings! \"\"\" trigger = False ##", "order='1', unit='CAG', start=cagstart, end=cagend) intervening = etree.Element('input', type='intervening', sequence=intv, prior='1')", "', Colour.end, 'XML Config: Specified unpaired_pairing_penalty integer is invalid.')) trigger=True", "True is returned. 
If the path does not exist, and", "stderr=subprocess.PIPE) unzipd.wait() return True def sequence_pairings(data_path, instance_rundir): ## ## Get", "def set_dictionary(self): \"\"\" Takes the now validated XML and extracts", "= input_list[input_object].lstrip().rstrip().split(' ')[0:1] return ''.join(cleanse_target) else: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:2]", "n_features)) temp = next(data_file) feature_names = np.array(temp) labels = []", "character in parsed_arguments.jobname: if character is ' ' or character", "make the run directory for datetime if not os.path.exists(output_root): log.info('{}{}{}{}'.format(Colour.bold,", "'XML Config: SNP Observation value invalid! Please use 1-10.')) trigger", "pretty_print=True) element_tree = cElementTree.XML(string_repr) self.config_dict = recursive_generation(element_tree) self.config_dict = self.config_dict[list(self.config_dict.keys())[0]]", "cagend = '' intv = allele_object.get_intervening() ccgstart = ''; ccgend", "not be found.')) trigger = True for fqfile in glob.glob(os.path.join(data_directory,", "and if valid, converts the parameters within the file to", "flag settings if sequence_qc_flag == 'True': trimming_type = self.config_dict['trim_flags']['@trim_type'] if", "def __init__(self, database, descriptor): self.database = database self.descriptor = descriptor", "trigger = True if not (reverse_reference.endswith('fa') or reverse_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "= self.config_dict['alignment_flags']['@unpaired_pairing_penalty'] if not unpaired_pairing_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "'shd__ ', Colour.end, 'XML Config: Specified reverse reference file is", "unpaired_pairing_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified unpaired_pairing_penalty integer", "error if not dtd_object.validate(self.config_file): 
dtd_file.close() log.error(\"DTD validation failure {0}: {1}\".format(self.config_filename,", "close the object (memory) and raise an error if not", "= instance_params['genotype_prediction'] snp_calling = instance_params['snp_calling'] if quality_control == 'True': try:type_func('java')", "not individual_prclip.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified prime_clipping_penalty", "from WHICH as apparently type functions over different shells/config files", "Can be either a directory or file. If the path", "xmlfi.write(s.decode()) xmlfi.close() return atypical_path def generate_reference(input_xml, index_path, ref_indexes, direction): ##TODO", "snp_observation_pcnt = self.config_dict['prediction_flags']['@snp_observation_threshold'] if not snp_observation_pcnt.isdigit(): if not int(snp_observation_pcnt) in", "if os.path.lexists(path): return True if raise_exception: log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified input path", "date + '-' + walltime ## If the user specified", "adapter position invalid! [5P, 3P, AP]')) trigger = True error_rate", "sequence_qc_flag = self.config_dict['instance_flags']['@quality_control'] if not (sequence_qc_flag == 'True' or sequence_qc_flag", "int: cleanse_target = input_list[input_object].split(':')[1].lstrip().rstrip() return cleanse_target else: return '*' def", "0: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'I/O: Non-even number of input", "'shd__ ', Colour.end, 'XML Config: Specified seeded_chain_drop integer is invalid.'))", "ruleset i.e. 
confirms conformation of XML structure \"\"\" ## ##", "'-f', '-d', extract_target], stderr=subprocess.PIPE) unzipd.wait() return True def sequence_pairings(data_path, instance_rundir):", "NameError: trigger=True try:type_func('samtools') except NameError: trigger=True try:type_func('generatr') except NameError: trigger=True", "'XML Config: Genotype Prediction control flag is not True/False.')) trigger", "Config: SNP Observation value invalid! Please use 1-10.')) trigger =", "\"\"\" Takes the now validated XML and extracts information from", "self.set_dictionary() self.validate_config() def validate_against_dtd(self): \"\"\" Validate input config against DTD", "file_count = len(sorted_input) if not file_count % 2 == 0:", "in range(0, len(trim_lines)): if '== Summary ==' in trim_lines[i]: summary_start", "not reverse_data_name.endswith('_R2'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Reverse input file does not end", "scriptdir + \"/config/config.dtd\" ## ## Check for configuration file (just", "= '\\033[94m' green = '\\033[92m' yellow = '\\033[93m' red =", "'Clearing pre-existing Jobname Prefix: ', jobname)) run_dir = os.path.join(output_root, jobname)", "not os.path.exists(output_root): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating output root... '))", "flag is not True/False.')) trigger = True genotype_flag = self.config_dict['instance_flags']['@genotype_prediction']", "the path exists, True is returned. 
If the path does", "run_dir = '' output_root = output_argument[0] if jobname: target_output =", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified seq_match_score integer is", "in forward_adapter: if charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "os.path.exists(run_dir): shutil.rmtree(run_dir, ignore_errors=True) mkdir_p(run_dir) else: raise Exception('User chose not to", "= subprocess.Popen(['gzip', '-q', '-f', '-d', extract_target], stderr=subprocess.PIPE) unzipd.wait() return True", "extract_data(input_data_directory): target_files = glob.glob(os.path.join(input_data_directory, '*')) for extract_target in target_files: if", "', Colour.end, 'Clearing pre-existing Jobname Prefix: ', jobname)) run_dir =", "output.') else: ## Ensures root output is a real directory", "predict_path = os.path.join(instance_rundir, sample_root, 'Predict') file_pair[sample_root] = [forward_data, reverse_data, instance_path,", "'shd__ ', Colour.end, 'Specified config file could not be found.'))", "Colour.end, 'XML Config: Specified reverse reference file is not a", "== 'True': min_seed_length = self.config_dict['alignment_flags']['@min_seed_length'] if not min_seed_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "== 'n'): log.info('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Invalid input. 
Please input", "etree.Element('loci', label=allele_object.get_reflabel()); data_root.append(loci_root) ## ## Loci Nodes fp_input = etree.Element('input',", "dtd_file = open(self.dtd_filename, 'r') dtd_object = etree.DTD(dtd_file) ## ## If", "'Specified config file is not an XML file.')) trigger =", "slice from above array scraping_buffer = 8 if '-q' in", "seek_target(input_list, target): for i in range(0, len(input_list)): if target in", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified trimming adapter not", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified reverse reference file", "stderr=subprocess.PIPE) gen_process.wait() ## ## Join typical and atypical reference into", "Config: Specified forward reference file is not a fa/fas file.'))", "## Takes the formatted xml doc, puts through generator, returns", "element_tree = cElementTree.XML(string_repr) self.config_dict = recursive_generation(element_tree) self.config_dict = self.config_dict[list(self.config_dict.keys())[0]] def", "log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Clearing pre-existing Jobname Prefix: ', jobname))", "if parsed_arguments.config: if not filesystem_exists_check(parsed_arguments.config[0]): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified", "dc.items(): dd[k].append(v) d = {t.tag: {k: v[0] if len(v) ==", "integer is invalid.')) trigger=True seq_match_score = self.config_dict['alignment_flags']['@seq_match_score'] if not seq_match_score.isdigit():", "= t.text.strip() if children or t.attrib: if text: d[t.tag]['#text'] =", "create indexing issue.')) trigger = True ## ## Instance flag", "'shd__ ', Colour.end, 'XML Config: Specified gap_extend_penalty integer(s) is(are) invalid.'))", "not an XML file.')) trigger = True return trigger def", "= date + '-' + walltime ## If the user", "variables within the pipeline. 
Recursion adapted from http://stackoverflow.com/a/9286702 \"\"\" def", "Check reverse ends with R2 reverse_data_name = sorted_input[i+1].split('/')[-1].split('.')[0] if not", "= '200' ccgstart = '1'; ccgend = '20' if direction", "band_width = self.config_dict['alignment_flags']['@band_width'] if not band_width.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "AttributeError for -b style try: quality_control = instance_params.config_dict['instance_flags']['@quality_control'] alignment =", "'shd__ ', Colour.end, 'XML Config: Sequence Quality control flag is", "from xml.etree import cElementTree from lxml import etree from reportlab.pdfgen", "if not seq_match_score.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "self.config_dict['demultiplex_flags']['@reverse_adapter'] for charbase in reverse_adapter: if charbase not in trim_adapter_base:", "if not os.path.isfile(forward_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "dictionary, validate vs ruleset self.validate_against_dtd() self.set_dictionary() self.validate_config() def validate_against_dtd(self): \"\"\"", "(for run ident) date = datetime.date.today().strftime('%d-%m-%Y') walltime = datetime.datetime.now().strftime('%H%M%S') today", "exist, and raise_exception is set to True, an IOError is", "'T'] if demultiplexing_flag == 'True': forward_adapter = self.config_dict['demultiplex_flags']['@forward_adapter'] for charbase", "root doesn't exist, make it ## Then make the run", "__version__ = '1.0' __author__ = '<EMAIL>' ## ## Imports import", "ccg_region = etree.Element('input', type='repeat_region', order='2', unit='CCG', start=ccgstart, end=ccgend) cct_region =", "index_path: :return: \"\"\" ##TODO docstring atypical_path = os.path.join(index_path, '{}{}_{}.xml'.format(direction, label,", "or character is '/': log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified Job 
Name has invalid", "Validate input config against DTD ruleset i.e. confirms conformation of", "= etree.Element('input', type='repeat_region', order='1', unit='CAG', start=cagstart, end=cagend) intervening = etree.Element('input',", "'XML Config: Sequence Alignment flag is not set to True/False.'))", "f.read() ## Loads data set from csv, into objects in", "not be found.')) return False def check_input_files(input_format, input_file): if input_file.endswith(input_format):", "= subprocess.Popen([binary_string], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) binary_result = binary_subprocess.communicate() binary_subprocess.wait() if", "ordering isn't screwy on linux input_files = glob.glob(os.path.join(data_path, '*')) sorted_input", "Specified error_rate is not a valid integer.')) trigger = True", "', Colour.end, 'XML Config: Specified error_rate is not a valid", "configuration instance settings data_directory = self.config_dict['@data_dir'] if not os.path.exists(data_directory): log.error('{}{}{}{}'.format(Colour.red,", "ccgstart = ''; ccgend = '' ccglen = allele_object.get_ccg() cctlen", "NameError: trigger=True try: type_func('freebayes') except NameError: trigger=True return trigger def", "except AttributeError: quality_control = instance_params['quality_control'] alignment = instance_params['sequence_alignment'] genotyping =", "= self.config_dict['alignment_flags']['@gap_extend_penalty'] gap_extend_penalty = gap_extend_penalty_raw.split(',') for individual_gaextend in gap_extend_penalty: if", "'Creating output root... 
')) mkdir_p(output_root) run_dir = os.path.join(output_root, 'ScaleHDRun_'+today) log.info('{}{}{}{}'.format(Colour.bold,", "enumerate(data_file): data[i] = d[:-1] label = d[-1] labels.append(label) le =", "Config: Specified error_rate is not a valid integer.')) trigger =", "genotyping == 'True': try:type_func('samtools') except NameError: trigger=True try:type_func('generatr') except NameError:", "'1.0' __author__ = '<EMAIL>' ## ## Imports import string import", "files present so, quit \"\"\" trigger = False ## ##", "list was populated, create dictionary, Append keys if children: dd", "= self.config_dict[list(self.config_dict.keys())[0]] def validate_config(self): \"\"\" Method which validates the configuration", "not True/False.')) trigger = True ## ## Demultiplexing flag settings", "self.config_dict['demultiplex_flags']['@reverse_position'] if reverse_position not in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__", "fp_flank = 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC' cagstart = ''; cagend = '' intv", "references have identical filenames. 
Will create indexing issue.')) trigger =", "NameError: trigger=True try:type_func('generatr') except NameError: trigger=True if genotyping == 'True':", "if not minimum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "if not (fqfile.endswith('.fq') or fqfile.endswith('.fastq') or fqfile.endswith('.fq.gz') or fqfile.endswith('.fastq.gz')): log.error('{}{}{}{}'.format(Colour.red,", "is not a valid integer.')) trigger = True minimum_length =", "if purge_choice.lower() == 'y': log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Clearing pre-existing", "## Generates folder name based on date (for run ident)", "feature_names = np.array(temp) labels = [] for i, d in", "demultiplexing flag.')) trigger = True reverse_position = self.config_dict['demultiplex_flags']['@reverse_position'] if reverse_position", "Generates folder name based on date (for run ident) date", "= 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT' if direction == 'fw': cagstart = '1'; cagend", "os.makedirs(path) except OSError as exc: if exc.errno == errno.EEXIST and", "['A', 'G', 'C', 'T'] if demultiplexing_flag == 'True': forward_adapter =", "= True error_tolerance = self.config_dict['trim_flags']['@error_tolerance'] if not isinstance(float(error_tolerance), float): log.error('{}{}{}{}'.format(Colour.red,", "= datetime.datetime.now().strftime('%H%M%S') today = date + '-' + walltime ##", "not (reverse_reference.endswith('fa') or reverse_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "converts the parameters within the file to a dictionary object,", "data path ## Sort so that ordering isn't screwy on", "not seeded_chain_drop.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified seeded_chain_drop", "set modeldescr_name = self.descriptor with open(modeldescr_name) as f: descr_text =", "file reader. 
Opens a configuration file, and if valid, converts", "float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified error tolerance", "d ## ## Takes the formatted xml doc, puts through", "## ## Demultiplexing flag settings trim_adapter_base = ['A', 'G', 'C',", "Colour.end, 'XML Config: Specified seq_match_score integer is invalid.')) trigger=True mismatch_penalty", "if either return false, path is invalid or unsupported files", "input_file.endswith(input_format): return True return False def initialise_libraries(instance_params): trigger = False", "= '_'.join(forward_data_name.split('_')[:-1]) instance_path = os.path.join(instance_rundir) seq_qc_path = os.path.join(instance_rundir, sample_root, 'SeqQC')", "== 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Genotype Prediction", "Name has invalid characters: \"', character, '\"')) trigger = True", "True atypical_flag = self.config_dict['instance_flags']['@atypical_realignment'] if not (atypical_flag == 'True' or", "def sanitise_inputs(parsed_arguments): \"\"\" Utilises filesystem_exists_check and check_input_files if either return", "## ## Values for key if t.attrib: d[t.tag].update(('@' + k,", "return DataClump(DATA=data, TARGET=hash_int_labels, FTRNAME=feature_names[:-1], DESCR=descr_text, ENCDR=le) def parse_boolean(boolean_value): \"\"\" Given", "= True elif not int(quality_threshold) in range(0,39): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified gap_extend_penalty integer(s) is(are)", "float(error_tolerance) in np.arange(0,1.1,0.01): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "= self.config_dict['trim_flags']['@error_tolerance'] if not isinstance(float(error_tolerance), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML 
Config: Sequence Quality control flag", "string is NOT empty. Parameter raise_exception determines if a ValueError", "valid selection.')) trigger = True forward_adapter = self.config_dict['trim_flags']['@forward_adapter'] for charbase", "if not (purge_choice.lower() == 'y') and not (purge_choice.lower() == 'n'):", "fp_input = etree.Element('input', type='fiveprime', flank=fp_flank) cag_region = etree.Element('input', type='repeat_region', order='1',", "Sort so that ordering isn't screwy on linux input_files =", "= '1'; ccgend = '20' ## ## Create XML data_root", "to a dictionary object, reader to be viewed through accessing", "'yes' will yield True. \"\"\" boolean_value = string.lower(boolean_value) in ('yes',", "',Colour.end,'Missing binary: ', binary, '!')) raise NameError ## ## To", "file. If the path exists, True is returned. If the", "not dtd_object.validate(self.config_file): dtd_file.close() log.error(\"DTD validation failure {0}: {1}\".format(self.config_filename, dtd_object.error_log.filter_from_errors()[0])) sys.exit(2)", "trigger=True try:type_func('generatr') except NameError: trigger=True if genotyping == 'True': try:type_func('samtools')", "= np.array(temp) labels = [] for i, d in enumerate(data_file):", "Colour.end, 'XML Config: Specified min_length is not a valid integer.'))", "in map(recursive_generation, children): for k, v in dc.items(): dd[k].append(v) d", "## Get input files from data path ## Sort so", "instance_params['quality_control'] alignment = instance_params['sequence_alignment'] genotyping = instance_params['genotype_prediction'] snp_calling = instance_params['snp_calling']", "= prime_clipping_penalty_raw.split(',') for individual_prclip in prime_clipping_penalty: if not individual_prclip.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "an error if not dtd_object.validate(self.config_file): dtd_file.close() log.error(\"DTD validation failure {0}:", "Colour.end, 'Clearing pre-existing Jobname Prefix: ', jobname)) run_dir = 
os.path.join(output_root,", "PyPDF2 from sklearn import preprocessing from collections import defaultdict from", "False is returned. \"\"\" if os.path.lexists(path): return True if raise_exception:", "## ## Get Anchor summary_start = 0 for i in", "pre-existing Jobname Prefix: ', jobname)) run_dir = os.path.join(output_root, jobname) if", "Loads description file for respective data set modeldescr_name = self.descriptor", "input('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Job folder already exists. Delete existing", "settings data_directory = self.config_dict['@data_dir'] if not os.path.exists(data_directory): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "integer(s) is(are) invalid.')) trigger=True prime_clipping_penalty_raw = self.config_dict['alignment_flags']['@prime_clipping_penalty'] prime_clipping_penalty = prime_clipping_penalty_raw.split(',')", "type='repeat_region', order='3', unit='CCT', start=str(cctlen), end=str(cctlen)) tp_input = etree.Element('input', type='threeprime', flank=tp_flank)", "file is not a fa/fas file.')) trigger = True reverse_reference", "pipeline. Recursion adapted from http://stackoverflow.com/a/9286702 \"\"\" def recursive_generation(t): d =", "', Colour.end, 'XML Config: Specified gap_extend_penalty integer(s) is(are) invalid.')) trigger=True", "be found.')) trigger = True for fqfile in glob.glob(os.path.join(data_directory, '*')):", "'XML Config: Specified seeded_chain_drop integer is invalid.')) trigger=True seq_match_score =", "'200' ccgstart = '1'; ccgend = '20' if direction ==", "delete pre-existing Job folder. Cannot write output.') else: ## Ensures", "valid integer.')) trigger = True ## ## Trimming flag settings", "sys.exit(2) dtd_file.close() def set_dictionary(self): \"\"\" Takes the now validated XML", "Check forward ends with R1 forward_data_name = sorted_input[i].split('/')[-1].split('.')[0] if not", "to be viewed through accessing the config_dict variable. 
\"\"\" def", "= ''; cagend = '' intv = allele_object.get_intervening() ccgstart =", "Colour.end, 'Invalid input. Please input Y or N.')) continue else:", "walltime ## If the user specified root doesn't exist, make", "self.config_dict['trim_flags']['@adapter_flag'] if not (adapter_flag in trim_adapters): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "Colour.end, 'XML Config: Trimming type is not Quality/Adapter/Both.')) trigger =", "root output is a real directory ## Generates folder name", "True forward_adapter = self.config_dict['trim_flags']['@forward_adapter'] for charbase in forward_adapter: if charbase", "unit='CCT', start=str(cctlen), end=str(cctlen)) tp_input = etree.Element('input', type='threeprime', flank=tp_flank) for node", "= 'type {}'.format(binary) binary_subprocess = subprocess.Popen([binary_string], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) binary_result", "not a valid integer.')) trigger = True minimum_overlap = self.config_dict['demultiplex_flags']['@min_overlap']", "= os.path.join(index_path, '{}{}_{}.xml'.format(direction, label, allele_object.get_reflabel())) fp_flank = 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC' cagstart =", "'shd__ ', Colour.end, 'Creating output root... ')) mkdir_p(output_root) run_dir =", "== 'True' or atypical_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "compressed input data. Extracting!')) break for extract_target in target_files: unzipd", "generate_reference(input_xml, index_path, ref_indexes, direction): ##TODO docstring label = input_xml.split('/')[-1].split('.')[0] target_output", "= True if trigger: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "except NameError: trigger=True return trigger def sanitise_outputs(jobname, output_argument): run_dir =", "if forward_position not in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "raised if the string is empty. 
If raise_exception is False", "reverse ends with R2 reverse_data_name = sorted_input[i+1].split('/')[-1].split('.')[0] if not reverse_data_name.endswith('_R2'):", "True reverse_position = self.config_dict['demultiplex_flags']['@reverse_position'] if reverse_position not in ['5P', '3P',", "self.config_dict['trim_flags']['@forward_adapter'] for charbase in forward_adapter: if charbase not in trim_adapter_base:", "True maximum_length = self.config_dict['demultiplex_flags']['@max_length'] if not maximum_length == '': if", "return align_lines[1:] ## ## No need to tidy up report", "allele_object: :param index_path: :return: \"\"\" ##TODO docstring atypical_path = os.path.join(index_path,", "if not skip_seed_with_occurrence.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "blue = '\\033[94m' green = '\\033[92m' yellow = '\\033[93m' red", "is invalid.')) trigger=True chain_drop = self.config_dict['alignment_flags']['@chain_drop'] if not isinstance(float(chain_drop), float):", "green = '\\033[92m' yellow = '\\033[93m' red = '\\033[91m' bold", "seed_length_extension = self.config_dict['alignment_flags']['@seed_length_extension'] if not isinstance(float(seed_length_extension), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "'y', 'true', 't', '1') return boolean_value def empty_string_check(string, raise_exception=True): \"\"\"", "'*')) sorted_input = sorted(input_files) sequence_pairs = [] file_count = len(sorted_input)", "invalid! Please use 1-10.')) trigger = True quality_cutoff = self.config_dict['prediction_flags']['@quality_cutoff']", "binary, '!')) raise NameError ## ## To determine which binaries", "info to dictionary, validate vs ruleset self.validate_against_dtd() self.set_dictionary() self.validate_config() def", "check_input_files(input_format, input_file): if input_file.endswith(input_format): return True return False def initialise_libraries(instance_params):", "is NOT empty. 
Parameter raise_exception determines if a ValueError exception", "all pass, guarantees that the settings dictionary is full of", "detected in forward_adapter demultiplexing flag.')) trigger = True forward_position =", "end=ccgend) cct_region = etree.Element('input', type='repeat_region', order='3', unit='CCT', start=str(cctlen), end=str(cctlen)) tp_input", "label = input_xml.split('/')[-1].split('.')[0] target_output = os.path.join(index_path, label + '.fa') temp_output", "'shd__ ', Colour.end, 'XML Config: Trimming type is not Quality/Adapter/Both.'))", "+ '-' + walltime ## If the user specified root", "!= '': return False if raise_exception: raise ValueError(\"Empty string detected!\")", "not end in _R2. ', reverse_data)) sys.exit(2) ## ## Make", "True reverse_reference = self.config_dict['@reverse_reference'] if not os.path.isfile(reverse_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "[] for i, d in enumerate(data_file): data[i] = d[:-1] label", "= etree.DTD(dtd_file) ## ## If validation fails, close the object", "if stage == 'trim': with open(input_report_file, 'r') as trpf: trim_lines", "type is not Quality/Adapter/Both.')) trigger = True quality_threshold = self.config_dict['trim_flags']['@quality_threshold']", "xmlfile in parsed_arguments.config: if not check_input_files('.xml',xmlfile): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "open(data_file_name) as f: data_file = csv.reader(f) temp = next(data_file) n_samples", "not filesystem_exists_check(parsed_arguments.config[0]): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file could", "extract_target in target_files: if extract_target.lower().endswith(('.fq.gz', '.fastq.gz')): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end,", "v for k, v in dd.items()}} ## ## Values for", "== '': if not maximum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "demultiplexing_flag == 'False'): 
log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Demultiplexing", "replace_fqfile(mutate_list, target_fqfile, altered_path): if target_fqfile in mutate_list: loc = mutate_list.index(target_fqfile)", "'XML Config: SNP Quality Cutoff value is not an integer.'))", "type_func('freebayes') except NameError: trigger=True return trigger def sanitise_outputs(jobname, output_argument): run_dir", "== 'rv': cagstart = '100'; cagend = '100' ccgstart =", "'Align') predict_path = os.path.join(instance_rundir, sample_root, 'Predict') file_pair[sample_root] = [forward_data, reverse_data,", "could not be found.')) trigger = True for fqfile in", "= [] for i, d in enumerate(data_file): data[i] = d[:-1]", "return ''.join(cleanse_target) else: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:2] return ' '.join(cleanse_target)", "= os.path.join(output_root, jobname) if os.path.exists(run_dir): shutil.rmtree(run_dir, ignore_errors=True) mkdir_p(run_dir) else: raise", "'XML Config: Atypical Realignment flag is not True/False.')) trigger =", "= etree.tostring(data_root, pretty_print=True) with open(atypical_path, 'w') as xmlfi: xmlfi.write(s.decode()) xmlfi.close()", "', jobname)) run_dir = os.path.join(output_root, jobname) mkdir_p(run_dir) else: purge_choice =", "reverse_data)) sys.exit(2) ## ## Make Stage outputs for use in", "= trim_lines[summary_start:summary_start + scraping_buffer] trpf.close() return summary_data[2:] ## ## If", "\"\"\" trigger = False ## ## Jobname prefix validity check", "string with 'true', 't', 'y' or 'yes' will yield True.", "different shells/config files def type_func(binary): binary_result = [] binary_string =", "scrape_summary_data(stage, input_report_file): ## ## If the argument input_report_file is from", "If the user specified root doesn't exist, make it ##", "confirms conformation of XML structure \"\"\" ## ## Open >", "',Colour.end,'Specified Job Name has invalid characters: \"', character, '\"')) 
trigger", "keys as attributes.\"\"\" def __init__(self, **kwargs): dict.__init__(self, kwargs) self.__dict__ =", "align_path = os.path.join(instance_rundir, sample_root, 'Align') predict_path = os.path.join(instance_rundir, sample_root, 'Predict')", "self.config_filename is None: log.error(\"No configuration file specified!\") else: self.config_file =", "jobname) if os.path.exists(run_dir): shutil.rmtree(run_dir, ignore_errors=True) mkdir_p(run_dir) else: raise Exception('User chose", "os.path.join(output_root, jobname) if not os.path.exists(target_output): log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating", "input_files = glob.glob(os.path.join(data_path, '*')) sorted_input = sorted(input_files) sequence_pairs = []", "or unsupported files present so, quit \"\"\" trigger = False", "= self class DataLoader: def __init__(self, database, descriptor): self.database =", "or binary_result[1]: log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Missing binary: ', binary, '!')) raise NameError", "['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Given", "trigger = True alignment_flag = self.config_dict['instance_flags']['@sequence_alignment'] if not (alignment_flag ==", "origin differs ## try for -c style, except AttributeError for", "'true', 't', 'y' or 'yes' will yield True. \"\"\" boolean_value", "self.config_dict['alignment_flags']['@seq_match_score'] if not seq_match_score.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "order='2', unit='CCG', start=ccgstart, end=ccgend) cct_region = etree.Element('input', type='repeat_region', order='3', unit='CCT',", "directory ## Generates folder name based on date (for run", "argument input_report_file is from trimming.. 
if stage == 'trim': with", "stage == 3: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:1] return ''.join(cleanse_target) else:", "Config: Specified error tolerance is not a valid float.')) trigger", "is not set to True/False.')) trigger = True alignment_flag =", "the path does not exist, and raise_exception is set to", "if not minimum_length == '': if not minimum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "Config: Specified min_seed_length integer is invalid.')) trigger=True band_width = self.config_dict['alignment_flags']['@band_width']", "'XML Config: Invalid character detected in FW adapter sequence.')) trigger", "is from alignment.. if stage == 'align': with open(input_report_file, 'r')", "= alnrpf.readlines() alnrpf.close() ## ## No ranges required, only skip", "is invalid.')) trigger=True seed_length_extension = self.config_dict['alignment_flags']['@seed_length_extension'] if not isinstance(float(seed_length_extension), float):", "allele_object.get_intervening() ccgstart = ''; ccgend = '' ccglen = allele_object.get_ccg()", "if not (demultiplexing_flag == 'True' or demultiplexing_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red,", "## ## Subfunction for recycling code ## Calls UNIX type", "to True, an IOError is raised - else False is", "in parsed_arguments.jobname: if character is ' ' or character is", "trim_lines[1]: scraping_buffer += 1 ## ## Get Anchor summary_start =", "= etree.Element('input', type='repeat_region', order='2', unit='CCG', start=ccgstart, end=ccgend) cct_region = etree.Element('input',", "= self.config_dict['alignment_flags']['@prime_clipping_penalty'] prime_clipping_penalty = prime_clipping_penalty_raw.split(',') for individual_prclip in prime_clipping_penalty: if", "character detected in reverse_adapter demultiplexing flag.')) trigger = True reverse_position", "True def sequence_pairings(data_path, instance_rundir): ## ## Get input files from", "= 
self.config_dict['alignment_flags']['@skip_seed_with_occurrence'] if not skip_seed_with_occurrence.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "'shd__ ', Colour.end, 'XML Config: SNP Quality Cutoff value is", "'20' if direction == 'rv': cagstart = '100'; cagend =", "= '\\033[0m' class ConfigReader(object): \"\"\" The configuration file reader. Opens", "prime_clipping_penalty_raw.split(',') for individual_prclip in prime_clipping_penalty: if not individual_prclip.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "for key if t.attrib: d[t.tag].update(('@' + k, v) for k,", "'\\033[36m' blue = '\\033[94m' green = '\\033[92m' yellow = '\\033[93m'", "parameters within the file to a dictionary object, reader to", "open(atypical_path, 'w') as xmlfi: xmlfi.write(s.decode()) xmlfi.close() return atypical_path def generate_reference(input_xml,", "invalid.')) trigger=True band_width = self.config_dict['alignment_flags']['@band_width'] if not band_width.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "## ## Optimise so code isn't recycled for i in", "= instance_params.config_dict['instance_flags']['@genotype_prediction'] snp_calling = instance_params.config_dict['instance_flags']['@snp_calling'] except AttributeError: quality_control = instance_params['quality_control']", "< x < 1.0.')) trigger = True ## ## Alignment", "file.')) trigger = True reverse_reference = self.config_dict['@reverse_reference'] if not os.path.isfile(reverse_reference):", "settings if alignment_flag == 'True': min_seed_length = self.config_dict['alignment_flags']['@min_seed_length'] if not", "check for ## AttributeError in the situation where instance_params origin", "= self.config_dict['demultiplex_flags']['@min_length'] if not minimum_length == '': if not minimum_length.isdigit():", "k, v in t.attrib.items()) if t.text: text = t.text.strip() if", "'shd__ ', Colour.end, 'XML Config: Specified seed_length_extension float is invalid.'))", 
"characters: \"', character, '\"')) trigger = True ## ## Config", "False indicates the string is NOT empty. Parameter raise_exception determines", "binary_result[0] or binary_result[1]: log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Missing binary: ', binary, '!')) raise", "= '1'; cagend = '200' ccgstart = '1'; ccgend =", "Config: Specified mismatch_penalty integer is invalid.')) trigger=True indel_penalty_raw = self.config_dict['alignment_flags']['@indel_penalty']", "jobname) mkdir_p(run_dir) else: purge_choice = '' while True: purge_choice =", "reverse_reference.split('/')[-1]: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: FW and RV", "returns dictionary string_repr = etree.tostring(self.config_file, pretty_print=True) element_tree = cElementTree.XML(string_repr) self.config_dict", "'Both'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Trimming type is", "'XML Config: Specified skip_seed_with_occurrence integer is invalid.')) trigger=True chain_drop =", "', Colour.end, 'XML Config: Invalid character detected in RV adapter", "True error_rate = self.config_dict['demultiplex_flags']['@error_rate'] if not error_rate.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "if not snp_observation_pcnt.isdigit(): if not int(snp_observation_pcnt) in range(1,5): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "empty_string_check(string, raise_exception=True): \"\"\" Simple check to see if the string", "from alignment.. 
if stage == 'align': with open(input_report_file, 'r') as", "return boolean_value def empty_string_check(string, raise_exception=True): \"\"\" Simple check to see", "Specified gap_extend_penalty integer(s) is(are) invalid.')) trigger=True prime_clipping_penalty_raw = self.config_dict['alignment_flags']['@prime_clipping_penalty'] prime_clipping_penalty", "sanitise_trimming_output(input_object, input_list): if type(input_object) is int: cleanse_target = input_list[input_object].split(':')[1].lstrip().rstrip() return", "d in enumerate(data_file): data[i] = d[:-1] label = d[-1] labels.append(label)", "returned. \"\"\" if os.path.lexists(path): return True if raise_exception: log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified", "'XML Config: Specified max_length is not a valid integer.')) trigger", "if direction == 'fw': cagstart = '1'; cagend = '200'", "if self.config_filename is None: log.error(\"No configuration file specified!\") else: self.config_file", "instance_params.config_dict['instance_flags']['@quality_control'] alignment = instance_params.config_dict['instance_flags']['@sequence_alignment'] genotyping = instance_params.config_dict['instance_flags']['@genotype_prediction'] snp_calling = instance_params.config_dict['instance_flags']['@snp_calling']", "cct_region = etree.Element('input', type='repeat_region', order='3', unit='CCT', start=str(cctlen), end=str(cctlen)) tp_input =", "int: if stage == 3: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:1] return", "= self.config_dict['instance_flags']['@demultiplex'] if not (demultiplexing_flag == 'True' or demultiplexing_flag ==", "d[t.tag]['#text'] = text else: d[t.tag] = text return d ##", "', Colour.end, 'XML Config: Specified reverse reference file could not", "> etree.DTD object dtd_file = open(self.dtd_filename, 'r') dtd_object = etree.DTD(dtd_file)", "except NameError: trigger=True try:type_func('fastqc') except NameError: trigger=True try:type_func('cutadapt') 
except NameError:", "## Slice and close summary_data = trim_lines[summary_start:summary_start + scraping_buffer] trpf.close()", "trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Invalid character detected", "Config: SNP Calling flag is not True/False.')) trigger = True", "',Colour.end,'I/O: Reverse input file does not end in _R2. ',", "if not minimum_overlap.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "break if purge_choice.lower() == 'y': log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Clearing", "pass purple = '\\033[95m' cyan = '\\033[96m' darkcyan = '\\033[36m'", "return sequence_pairs def filesystem_exists_check(path, raise_exception=True): \"\"\" Checks to see if", "AttributeError in the situation where instance_params origin differs ## try", "etree.Element('input', type='repeat_region', order='1', unit='CAG', start=cagstart, end=cagend) intervening = etree.Element('input', type='intervening',", "v in dd.items()}} ## ## Values for key if t.attrib:", "for recycling code ## Calls UNIX type for checking binaries", "import csv from io import StringIO import PyPDF2 from sklearn", "trigger=True ## ## Genotype prediction flag settings if genotype_flag ==", "self.config_dict['prediction_flags']['@quality_cutoff'] if not quality_cutoff.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "either return false, path is invalid or unsupported files present", "data_root = etree.Element('data') loci_root = etree.Element('loci', label=allele_object.get_reflabel()); data_root.append(loci_root) ## ##", "in dd.items()}} ## ## Values for key if t.attrib: d[t.tag].update(('@'", "mismatch_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified mismatch_penalty integer", "Colour.end, 'XML Config: Invalid character detected in forward_adapter demultiplexing flag.'))", "Inform user it's 
all gonna be okaaaayyyy log.info('{}{}{}{}'.format(Colour.green, 'shd__ ',", "index_path, ref_indexes, direction): ##TODO docstring label = input_xml.split('/')[-1].split('.')[0] target_output =", "as np import csv from io import StringIO import PyPDF2", "of range (0-38).')) trigger = True trim_adapters = ['-a','-g','-a$','-g^','-b'] adapter_flag", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Failure, exiting.')) sys.exit(2) else:", "', Colour.end, 'XML Config: Specified data directory could not be", "atypical_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Atypical", "is not a valid integer.')) trigger = True minimum_overlap =", "data set from csv, into objects in preparation for bunch()", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified mismatch_penalty integer is", "incase) if self.config_filename is None: log.error(\"No configuration file specified!\") else:", "unpaired_pairing_penalty = self.config_dict['alignment_flags']['@unpaired_pairing_penalty'] if not unpaired_pairing_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "', Colour.end, 'XML Config: Specified indel_penalty integer(s) is(are) invalid.')) trigger=True", "if alignment == 'True': try:type_func('seqtk') except NameError: trigger=True try:type_func('bwa') except", "return i def sanitise_trimming_output(input_object, input_list): if type(input_object) is int: cleanse_target", "if not (snpcall_flag == 'True' or snpcall_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red,", "if genotyping == 'True': try:type_func('samtools') except NameError: trigger=True try:type_func('generatr') except", "== '': if not minimum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "not 0.0 < x < 1.0.')) trigger = True ##", "if not seeded_chain_drop.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: 
Specified", "')[0:2] return ' '.join(cleanse_target) else: return '*' def mkdir_p(path): try:", "v in dc.items(): dd[k].append(v) d = {t.tag: {k: v[0] if", "run ident) date = datetime.date.today().strftime('%d-%m-%Y') walltime = datetime.datetime.now().strftime('%H%M%S') today =", "stdout=subprocess.PIPE, stderr=subprocess.PIPE) binary_result = binary_subprocess.communicate() binary_subprocess.wait() if 'not found'.encode() in", "from trimming.. if stage == 'trim': with open(input_report_file, 'r') as", "to tidy up report for genotyping ## since we already", "path is invalid or unsupported files present so, quit \"\"\"", "(alignment_flag == 'True' or alignment_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "Specified min_seed_length integer is invalid.')) trigger=True band_width = self.config_dict['alignment_flags']['@band_width'] if", "== 'True': forward_adapter = self.config_dict['demultiplex_flags']['@forward_adapter'] for charbase in forward_adapter: if", "input_list[input_object].lstrip().rstrip().split(' ')[0:2] return ' '.join(cleanse_target) else: return '*' def mkdir_p(path):", "already exists. Delete existing folder? 
Y/N: ')) if not (purge_choice.lower()", "= self.config_dict['instance_flags']['@sequence_alignment'] if not (alignment_flag == 'True' or alignment_flag ==", "'*' def sanitise_alignment_output(input_object, input_list, stage): if type(input_object) is int: if", "glob import datetime import subprocess import logging as log import", "'XML Config: Specified min_overlap is not a valid integer.')) trigger", "trigger = True snpcall_flag = self.config_dict['instance_flags']['@snp_calling'] if not (snpcall_flag ==", "not a valid float.')) trigger = True if not float(error_tolerance)", "cElementTree from lxml import etree from reportlab.pdfgen import canvas class", "'XML Config: Specified data directory could not be found.')) trigger", "jobname)) run_dir = os.path.join(output_root, jobname) mkdir_p(run_dir) else: purge_choice = ''", "reverse_position not in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "individual_prclip.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified prime_clipping_penalty integer(s)", "not int(snp_observation_pcnt) in range(1,5): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "etree.tostring(self.config_file, pretty_print=True) element_tree = cElementTree.XML(string_repr) self.config_dict = recursive_generation(element_tree) self.config_dict =", "input_report_file is from alignment.. if stage == 'align': with open(input_report_file,", "Prediction control flag is not True/False.')) trigger = True snpcall_flag", "'n'): log.info('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Invalid input. 
Please input Y", "not (adapter_flag in trim_adapters): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "'True': try:type_func('samtools') except NameError: trigger=True try:type_func('generatr') except NameError: trigger=True if", "== 'True' or genotype_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "for k, v in t.attrib.items()) if t.text: text = t.text.strip()", "integer.')) trigger = True minimum_length = self.config_dict['demultiplex_flags']['@min_length'] if not minimum_length", "in target_files: unzipd = subprocess.Popen(['gzip', '-q', '-f', '-d', extract_target], stderr=subprocess.PIPE)", "specified. Cannot continue without pairing!')) sys.exit(2) ## ## Optimise so", "'XML Config: Specified reverse reference file could not be found.'))", "## ## Instance variables self.scriptdir = scriptdir self.config_filename = config_filename", "= '1.0' __author__ = '<EMAIL>' ## ## Imports import string", "'XML Config: Specified error tolerance is not 0.0 < x", "failure {0}: {1}\".format(self.config_filename, dtd_object.error_log.filter_from_errors()[0])) sys.exit(2) dtd_file.close() def set_dictionary(self): \"\"\" Takes", "so that ordering isn't screwy on linux input_files = glob.glob(os.path.join(data_path,", "settings demultiplexing_flag = self.config_dict['instance_flags']['@demultiplex'] if not (demultiplexing_flag == 'True' or", "sanitise_alignment_output(input_object, input_list, stage): if type(input_object) is int: if stage ==", "def extract_data(input_data_directory): target_files = glob.glob(os.path.join(input_data_directory, '*')) for extract_target in target_files:", "if reverse_position not in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "identical filenames. 
Will create indexing issue.')) trigger = True ##", "Colour: def __init__(self): pass purple = '\\033[95m' cyan = '\\033[96m'", "if not file_count % 2 == 0: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "text: d[t.tag]['#text'] = text else: d[t.tag] = text return d", "os import errno import shutil import sys import glob import", "AP]')) trigger = True error_rate = self.config_dict['demultiplex_flags']['@error_rate'] if not error_rate.isdigit():", "set to True, an IOError is raised - else False", "', Colour.end, 'XML Config: SNP Observation value invalid! Please use", "temp_output return target_output def seek_target(input_list, target): for i in range(0,", "'True': forward_adapter = self.config_dict['demultiplex_flags']['@forward_adapter'] for charbase in forward_adapter: if charbase", "'XML Config: Trimming type is not Quality/Adapter/Both.')) trigger = True", "'shd__ ', Colour.end, 'XML Config: Specified forward reference file could", "preprocessing from collections import defaultdict from xml.etree import cElementTree from", "alignment_flag == 'True': min_seed_length = self.config_dict['alignment_flags']['@min_seed_length'] if not min_seed_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "Open > etree.DTD object dtd_file = open(self.dtd_filename, 'r') dtd_object =", "settings trim_adapter_base = ['A', 'G', 'C', 'T'] if demultiplexing_flag ==", "label, allele_object.get_reflabel())) fp_flank = 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC' cagstart = ''; cagend =", "-c style, except AttributeError for -b style try: quality_control =", "reverse_position = self.config_dict['demultiplex_flags']['@reverse_position'] if reverse_position not in ['5P', '3P', 'AP']:", "that ordering isn't screwy on linux input_files = glob.glob(os.path.join(data_path, '*'))", "== 0: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'I/O: Non-even number of", "in input_list[i]: return i def sanitise_trimming_output(input_object, input_list): if 
type(input_object) is", "import errno import shutil import sys import glob import datetime", "flag settings if genotype_flag == 'True': snp_observation_pcnt = self.config_dict['prediction_flags']['@snp_observation_threshold'] if", "as trpf: trim_lines = trpf.readlines() ## ## Determine buffer size", "error_rate is not a valid integer.')) trigger = True minimum_overlap", "import logging as log import numpy as np import csv", "', jobname)) run_dir = os.path.join(output_root, jobname) if os.path.exists(run_dir): shutil.rmtree(run_dir, ignore_errors=True)", "log.error(\"DTD validation failure {0}: {1}\".format(self.config_filename, dtd_object.error_log.filter_from_errors()[0])) sys.exit(2) dtd_file.close() def set_dictionary(self):", "% 2 == 0: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'I/O: Non-even", "k, v in dc.items(): dd[k].append(v) d = {t.tag: {k: v[0]", "quality_threshold.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified quality threshold", "darkcyan = '\\033[36m' blue = '\\033[94m' green = '\\033[92m' yellow", "sys.exit(2) ## ## Check reverse ends with R2 reverse_data_name =", "trigger = True for fqfile in glob.glob(os.path.join(data_directory, '*')): if not", "validation fails, close the object (memory) and raise an error", "etree from reportlab.pdfgen import canvas class Colour: def __init__(self): pass", "settings! 
\"\"\" trigger = False ## ## Main configuration instance", "or fqfile.endswith('.fastq') or fqfile.endswith('.fq.gz') or fqfile.endswith('.fastq.gz')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "ccgend = '' ccglen = allele_object.get_ccg() cctlen = allele_object.get_cct() tp_flank", "__init__(self, scriptdir, config_filename=None): ## ## Instance variables self.scriptdir = scriptdir", "'shd__ ', Colour.end, 'I/O: Non-even number of input files specified.", "sequence_pairs = [] file_count = len(sorted_input) if not file_count %", "validation failure {0}: {1}\".format(self.config_filename, dtd_object.error_log.filter_from_errors()[0])) sys.exit(2) dtd_file.close() def set_dictionary(self): \"\"\"", "variable. \"\"\" def __init__(self, scriptdir, config_filename=None): ## ## Instance variables", "labels.append(label) le = preprocessing.LabelEncoder() le.fit(labels) hash_int_labels = le.transform(labels) return DataClump(DATA=data,", "trigger=True prime_clipping_penalty_raw = self.config_dict['alignment_flags']['@prime_clipping_penalty'] prime_clipping_penalty = prime_clipping_penalty_raw.split(',') for individual_prclip in", "a python dictionary {key: value}. This dictionary will be used", "not True/False.')) trigger = True genotype_flag = self.config_dict['instance_flags']['@genotype_prediction'] if not", "individual_prclip in prime_clipping_penalty: if not individual_prclip.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "initialise_libraries(instance_params): trigger = False ## ## Subfunction for recycling code", "Invalid character detected in RV adapter sequence.')) trigger = True", "returned. 
\"\"\" if string != '': return False if raise_exception:", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified error tolerance is", "NameError: trigger=True try:type_func('bwa') except NameError: trigger=True try:type_func('samtools') except NameError: trigger=True", "in range(1,5): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP Observation", "config_filename self.dtd_filename = scriptdir + \"/config/config.dtd\" ## ## Check for", "True/False.')) trigger = True genotype_flag = self.config_dict['instance_flags']['@genotype_prediction'] if not (genotype_flag", "will be used for variables within the pipeline. Recursion adapted", "trigger=True skip_seed_with_occurrence = self.config_dict['alignment_flags']['@skip_seed_with_occurrence'] if not skip_seed_with_occurrence.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "config vs dtd, parse info to dictionary, validate vs ruleset", "quality threshold integer out of range (0-38).')) trigger = True", "integer is invalid.')) trigger=True indel_penalty_raw = self.config_dict['alignment_flags']['@indel_penalty'] indel_penalty = indel_penalty_raw.split(',')", "alnrpf.close() ## ## No ranges required, only skip first line", "errno import shutil import sys import glob import datetime import", "be used for variables within the pipeline. 
Recursion adapted from", "for datasets: dictionary-like object that exposes its keys as attributes.\"\"\"", "('yes', 'y', 'true', 't', '1') return boolean_value def empty_string_check(string, raise_exception=True):", "Please use 1-10.')) trigger = True quality_cutoff = self.config_dict['prediction_flags']['@quality_cutoff'] if", "trigger=True try:type_func('fastqc') except NameError: trigger=True try:type_func('cutadapt') except NameError: trigger=True if", "import StringIO import PyPDF2 from sklearn import preprocessing from collections", "http://stackoverflow.com/a/9286702 \"\"\" def recursive_generation(t): d = {t.tag: {} if t.attrib", "\"\"\"Container object for datasets: dictionary-like object that exposes its keys", "= sorted_input[i].split('/')[-1].split('.')[0] if not forward_data_name.endswith('_R1'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Forward input file", "ends with R1 forward_data_name = sorted_input[i].split('/')[-1].split('.')[0] if not forward_data_name.endswith('_R1'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__", "found.')) trigger = True for fqfile in glob.glob(os.path.join(data_directory, '*')): if", "over different shells/config files def type_func(binary): binary_result = [] binary_string", "date (for run ident) date = datetime.date.today().strftime('%d-%m-%Y') walltime = datetime.datetime.now().strftime('%H%M%S')", "## ## If list was populated, create dictionary, Append keys", "which binaries to check for ## AttributeError in the situation", "', Colour.end, 'XML Config: Given demultiplexing reverse adapter position invalid!", "sanitise_outputs(jobname, output_argument): run_dir = '' output_root = output_argument[0] if jobname:", "index_path, direction): \"\"\" :param allele_object: :param index_path: :return: \"\"\" ##TODO", "snp_observation_pcnt.isdigit(): if not int(snp_observation_pcnt) in range(1,5): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "= os.path.join(index_path, label + '.fa') 
temp_output = os.path.join(index_path, label +", "## Main configuration instance settings data_directory = self.config_dict['@data_dir'] if not", "Specified min_overlap is not a valid integer.')) trigger = True", "detected in specified input directory.')) trigger = True forward_reference =", "le.fit(labels) hash_int_labels = le.transform(labels) return DataClump(DATA=data, TARGET=hash_int_labels, FTRNAME=feature_names[:-1], DESCR=descr_text, ENCDR=le)", "'1'; ccgend = '20' if direction == 'rv': cagstart =", "'Output directories OK!')) return run_dir def replace_fqfile(mutate_list, target_fqfile, altered_path): if", "open(input_report_file, 'r') as alnrpf: align_lines = alnrpf.readlines() alnrpf.close() ## ##", "= True quality_cutoff = self.config_dict['prediction_flags']['@quality_cutoff'] if not quality_cutoff.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "not skip_seed_with_occurrence.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified skip_seed_with_occurrence", "'\\033[91m' bold = '\\033[1m' underline = '\\033[4m' end = '\\033[0m'", "'XML Config: Specified seq_match_score integer is invalid.')) trigger=True mismatch_penalty =", "v in t.attrib.items()) if t.text: text = t.text.strip() if children", "csv, into objects in preparation for bunch() data_file_name = self.database", "not maximum_length == '': if not maximum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "data. 
Extracting!')) break for extract_target in target_files: unzipd = subprocess.Popen(['gzip',", "band_width integer is invalid.')) trigger=True seed_length_extension = self.config_dict['alignment_flags']['@seed_length_extension'] if not", "binary_subprocess.wait() if 'not found'.encode() in binary_result[0] or binary_result[1]: log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Missing", "name based on date (for run ident) date = datetime.date.today().strftime('%d-%m-%Y')", "import preprocessing from collections import defaultdict from xml.etree import cElementTree", "object (memory) and raise an error if not dtd_object.validate(self.config_file): dtd_file.close()", "demultiplexing reverse adapter position invalid! [5P, 3P, AP]')) trigger =", "alnrpf.readlines() alnrpf.close() ## ## No ranges required, only skip first", "indicates the string is NOT empty. Parameter raise_exception determines if", "= True minimum_overlap = self.config_dict['demultiplex_flags']['@min_overlap'] if not minimum_overlap.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "invalid.')) trigger=True ## ## Genotype prediction flag settings if genotype_flag", "error tolerance is not a valid float.')) trigger = True", "cagstart = '1'; cagend = '200' ccgstart = '1'; ccgend", "import string import os import errno import shutil import sys", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified min_seed_length integer is", "gap_extend_penalty_raw = self.config_dict['alignment_flags']['@gap_extend_penalty'] gap_extend_penalty = gap_extend_penalty_raw.split(',') for individual_gaextend in gap_extend_penalty:", "else: self.config_file = etree.parse(self.config_filename) ## ## Check config vs dtd,", "string import os import errno import shutil import sys import", "value}. 
This dictionary will be used for variables within the", "sorted_input[i] reverse_data = sorted_input[i+1] ## ## Check forward ends with", "= allele_object.get_cct() tp_flank = 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT' if direction == 'fw': cagstart", "toutfi.close() target_output = temp_output return target_output def seek_target(input_list, target): for", "false, path is invalid or unsupported files present so, quit", "t.text.strip() if children or t.attrib: if text: d[t.tag]['#text'] = text", "== 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Demultiplexing flag", "except NameError: trigger=True try: type_func('freebayes') except NameError: trigger=True return trigger", "= self.config_dict['demultiplex_flags']['@min_overlap'] if not minimum_overlap.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "= {t.tag: {k: v[0] if len(v) == 1 else v", "is returned. If the path does not exist, and raise_exception", "Cannot continue without pairing!')) sys.exit(2) ## ## Optimise so code", "cleanse_target = input_list[input_object].split(':')[1].lstrip().rstrip() return cleanse_target else: return '*' def sanitise_alignment_output(input_object,", "string is empty, True is returned. 
\"\"\" if string !=", "True/False.')) trigger = True snpcall_flag = self.config_dict['instance_flags']['@snp_calling'] if not (snpcall_flag", "If raise_exception is False and the string is empty, True", "raise_exception: log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified input path could not be found.')) return", "len(sorted_input), 2): file_pair = {} forward_data = sorted_input[i] reverse_data =", "= True forward_reference = self.config_dict['@forward_reference'] if not os.path.isfile(forward_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "Changed from WHICH as apparently type functions over different shells/config", "'True': try:type_func('seqtk') except NameError: trigger=True try:type_func('bwa') except NameError: trigger=True try:type_func('samtools')", "try:type_func('bwa') except NameError: trigger=True try:type_func('samtools') except NameError: trigger=True try:type_func('generatr') except", "self.descriptor with open(modeldescr_name) as f: descr_text = f.read() ## Loads", "character is ' ' or character is '/': log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified", "detected in RV adapter sequence.')) trigger = True error_tolerance =", "indexing issue.')) trigger = True ## ## Instance flag settings", "directories OK!')) return run_dir def replace_fqfile(mutate_list, target_fqfile, altered_path): if target_fqfile", "## If list was populated, create dictionary, Append keys if", "i def sanitise_trimming_output(input_object, input_list): if type(input_object) is int: cleanse_target =", "'' output_root = output_argument[0] if jobname: target_output = os.path.join(output_root, jobname)", "Trimming type is not Quality/Adapter/Both.')) trigger = True quality_threshold =", "if stage == 3: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:1] return ''.join(cleanse_target)", "= 0 for i in range(0, len(trim_lines)): if '== Summary", "to see if the path, specified by parameter path, exists.", 
"NameError: trigger=True if genotyping == 'True': try:type_func('samtools') except NameError: trigger=True", "'Predict') file_pair[sample_root] = [forward_data, reverse_data, instance_path, seq_qc_path, align_path, predict_path] sequence_pairs.append(file_pair)", "trigger=True mismatch_penalty = self.config_dict['alignment_flags']['@mismatch_penalty'] if not mismatch_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "for -b style try: quality_control = instance_params.config_dict['instance_flags']['@quality_control'] alignment = instance_params.config_dict['instance_flags']['@sequence_alignment']", "summary_data[2:] ## ## If the argument input_report_file is from alignment..", "## Join typical and atypical reference into one file if", "range(0, len(input_list)): if target in input_list[i]: return i def sanitise_trimming_output(input_object,", "the config_dict variable. \"\"\" def __init__(self, scriptdir, config_filename=None): ## ##", "reference into one file if direction == 'fw': toutfi =", "', Colour.end, 'XML Config: Specified quality threshold integer out of", "min_seed_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified min_seed_length integer", "', Colour.end, 'XML Config: Invalid character detected in FW adapter", "checking binaries present ## Changed from WHICH as apparently type", "object for datasets: dictionary-like object that exposes its keys as", "not a valid integer.')) trigger = True ## ## Trimming", "snp_calling = instance_params.config_dict['instance_flags']['@snp_calling'] except AttributeError: quality_control = instance_params['quality_control'] alignment =", "trigger=True try:type_func('bwa') except NameError: trigger=True try:type_func('samtools') except NameError: trigger=True try:type_func('generatr')", "run_dir = os.path.join(output_root, jobname) if os.path.exists(run_dir): shutil.rmtree(run_dir, ignore_errors=True) mkdir_p(run_dir) else:", 
"self.config_dict['instance_flags']['@snp_calling'] if not (snpcall_flag == 'True' or snpcall_flag == 'False'):", "trigger = True reverse_adapter = self.config_dict['demultiplex_flags']['@reverse_adapter'] for charbase in reverse_adapter:", "'not found'.encode() in binary_result[0] or binary_result[1]: log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Missing binary: ',", "Colour.end, 'XML Config: SNP Observation value invalid! Please use 1-10.'))", "with R1 forward_data_name = sorted_input[i].split('/')[-1].split('.')[0] if not forward_data_name.endswith('_R1'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O:", "trim_lines[i]: summary_start = i ## ## Slice and close summary_data", "self.config_dict['trim_flags']['@quality_threshold'] if not quality_threshold.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "functions over different shells/config files def type_func(binary): binary_result = []", "if not isinstance(float(chain_drop), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "sorted_input[i].split('/')[-1].split('.')[0] if not forward_data_name.endswith('_R1'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Forward input file does", "if the string provided by parameter string is empty. 
False", "def type_func(binary): binary_result = [] binary_string = 'type {}'.format(binary) binary_subprocess", "True ## ## Demultiplexing flag settings trim_adapter_base = ['A', 'G',", "self.config_dict['alignment_flags']['@prime_clipping_penalty'] prime_clipping_penalty = prime_clipping_penalty_raw.split(',') for individual_prclip in prime_clipping_penalty: if not", "d[:-1] label = d[-1] labels.append(label) le = preprocessing.LabelEncoder() le.fit(labels) hash_int_labels", "sorted_input = sorted(input_files) sequence_pairs = [] file_count = len(sorted_input) if", "True return False def initialise_libraries(instance_params): trigger = False ## ##", "binary_subprocess.communicate() binary_subprocess.wait() if 'not found'.encode() in binary_result[0] or binary_result[1]: log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__", "input_list[input_object].split(':')[1].lstrip().rstrip() return cleanse_target else: return '*' def sanitise_alignment_output(input_object, input_list, stage):", "contents. For example, a string with 'true', 't', 'y' or", "forward_position = self.config_dict['demultiplex_flags']['@forward_position'] if forward_position not in ['5P', '3P', 'AP']:", "and the string is empty, True is returned. \"\"\" if", "SNP Calling flag is not True/False.')) trigger = True ##", "data set modeldescr_name = self.descriptor with open(modeldescr_name) as f: descr_text", "gap_extend_penalty integer(s) is(are) invalid.')) trigger=True prime_clipping_penalty_raw = self.config_dict['alignment_flags']['@prime_clipping_penalty'] prime_clipping_penalty =", "quality_control = instance_params['quality_control'] alignment = instance_params['sequence_alignment'] genotyping = instance_params['genotype_prediction'] snp_calling", "'Job folder already exists. Delete existing folder? 
Y/N: ')) if", "intervening, ccg_region, cct_region, tp_input]: loci_root.append(node) s = etree.tostring(data_root, pretty_print=True) with", "atypical_path def generate_reference(input_xml, index_path, ref_indexes, direction): ##TODO docstring label =", "= subprocess.Popen(['cat', target_output, ref_indexes[0]], stdout=toutfi, stderr=subprocess.PIPE) cat_process.wait() toutfi.close() target_output =", "== 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Sequence Alignment", "the path, specified by parameter path, exists. Can be either", "Config: Given demultiplexing reverse adapter position invalid! [5P, 3P, AP]'))", "= d[-1] labels.append(label) le = preprocessing.LabelEncoder() le.fit(labels) hash_int_labels = le.transform(labels)", "## AttributeError in the situation where instance_params origin differs ##", "i, d in enumerate(data_file): data[i] = d[:-1] label = d[-1]", "atypical_path = os.path.join(index_path, '{}{}_{}.xml'.format(direction, label, allele_object.get_reflabel())) fp_flank = 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC' cagstart", "sorted_input[i+1].split('/')[-1].split('.')[0] if not reverse_data_name.endswith('_R2'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Reverse input file does", "specified by parameter path, exists. 
Can be either a directory", "reportlab.pdfgen import canvas class Colour: def __init__(self): pass purple =", "## Genotype prediction flag settings if genotype_flag == 'True': snp_observation_pcnt", "os.path.isfile(forward_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified forward reference", "type(input_object) is int: if stage == 3: cleanse_target = input_list[input_object].lstrip().rstrip().split('", "Config: Specified reverse reference file could not be found.')) trigger", "Config: Specified indel_penalty integer(s) is(are) invalid.')) trigger=True gap_extend_penalty_raw = self.config_dict['alignment_flags']['@gap_extend_penalty']", "UNIX type for checking binaries present ## Changed from WHICH", "', Colour.end, 'XML Config: Specified min_overlap is not a valid", "adapter sequence.')) trigger = True error_tolerance = self.config_dict['trim_flags']['@error_tolerance'] if not", "n_samples = int(temp[0]) n_features = int(temp[1]) data = np.empty((n_samples, n_features))", "'.fastq.gz')): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Detected compressed input data. Extracting!'))", "False ## ## Jobname prefix validity check if parsed_arguments.jobname: for", "is not an integer.')) trigger = True if trigger: log.error('{}{}{}{}'.format(Colour.red,", "the argument input_report_file is from trimming.. 
if stage == 'trim':", "Colour.end, 'XML Config: Specified prime_clipping_penalty integer(s) is(are) invalid.')) trigger=True unpaired_pairing_penalty", "+ \"/config/config.dtd\" ## ## Check for configuration file (just incase)", "input_list): if type(input_object) is int: cleanse_target = input_list[input_object].split(':')[1].lstrip().rstrip() return cleanse_target", "log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified input path could not be found.')) return False", "= self.config_dict['prediction_flags']['@quality_cutoff'] if not quality_cutoff.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "## ## If the argument input_report_file is from alignment.. if", "position invalid! [5P, 3P, AP]')) trigger = True error_rate =", "objects if stage == 'gtype': pass def generate_atypical_xml(label, allele_object, index_path,", "apparently type functions over different shells/config files def type_func(binary): binary_result", "3P, AP]')) trigger = True error_rate = self.config_dict['demultiplex_flags']['@error_rate'] if not", "reference file is not a fa/fas file.')) trigger = True", "'shd__ ', Colour.end, 'XML Config: Invalid character detected in reverse_adapter", "os.path.join(index_path, '{}{}_{}.xml'.format(direction, label, allele_object.get_reflabel())) fp_flank = 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC' cagstart = '';", "subprocess import logging as log import numpy as np import", "raise_exception: raise ValueError(\"Empty string detected!\") return True def sanitise_inputs(parsed_arguments): \"\"\"", "trigger=True if alignment == 'True': try:type_func('seqtk') except NameError: trigger=True try:type_func('bwa')", "t.attrib: if text: d[t.tag]['#text'] = text else: d[t.tag] = text", "2 == 0: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'I/O: Non-even number", "for dc in map(recursive_generation, children): for k, v in dc.items():", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', 
Colour.end, 'XML Config: Specified quality threshold integer", "shutil import sys import glob import datetime import subprocess import", "if not (sequence_qc_flag == 'True' or sequence_qc_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red,", "= csv.reader(f) temp = next(data_file) n_samples = int(temp[0]) n_features =", "Atypical Realignment flag is not True/False.')) trigger = True genotype_flag", "'shd__ ', Colour.end, 'XML Config: Specified quality threshold integer is", "', Colour.end, 'Invalid input. Please input Y or N.')) continue", "trigger = True quality_threshold = self.config_dict['trim_flags']['@quality_threshold'] if not quality_threshold.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "okaaaayyyy log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'Output directories OK!')) return run_dir", "'True': min_seed_length = self.config_dict['alignment_flags']['@min_seed_length'] if not min_seed_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "directory could not be found.')) trigger = True for fqfile", "Failure, exiting.')) sys.exit(2) else: log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'XML Config:", "as alnrpf: align_lines = alnrpf.readlines() alnrpf.close() ## ## No ranges", "issue.')) trigger = True ## ## Instance flag settings demultiplexing_flag", "Jobname prefix validity check if parsed_arguments.jobname: for character in parsed_arguments.jobname:", "= True error_rate = self.config_dict['demultiplex_flags']['@error_rate'] if not error_rate.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "== 'gtype': pass def generate_atypical_xml(label, allele_object, index_path, direction): \"\"\" :param", "if demultiplexing_flag == 'True': forward_adapter = self.config_dict['demultiplex_flags']['@forward_adapter'] for charbase in", "'shd__ ', Colour.end, 'XML Config: Specified min_length is not a", "'\\033[0m' class ConfigReader(object): \"\"\" The configuration file reader. 
Opens a", "Colour.end, 'XML Config: Failure, exiting.')) sys.exit(2) else: log.info('{}{}{}{}'.format(Colour.green, 'shd__ ',", "cat_process.wait() toutfi.close() target_output = temp_output return target_output def seek_target(input_list, target):", "\"\"\" Simple check to see if the string provided by", "'True' or genotype_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "return ' '.join(cleanse_target) else: return '*' def mkdir_p(path): try: os.makedirs(path)", "self.config_dict['alignment_flags']['@min_seed_length'] if not min_seed_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "raise_exception=True): \"\"\" Checks to see if the path, specified by", "Colour.end, 'XML Config: Specified chain_drop float is invalid.')) trigger=True seeded_chain_drop", "not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Invalid", "have the data from our own objects if stage ==", "XML file.')) trigger = True return trigger def extract_data(input_data_directory): target_files", "##TODO docstring atypical_path = os.path.join(index_path, '{}{}_{}.xml'.format(direction, label, allele_object.get_reflabel())) fp_flank =", "(purge_choice.lower() == 'n'): log.info('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Invalid input. Please", "## Create XML data_root = etree.Element('data') loci_root = etree.Element('loci', label=allele_object.get_reflabel());", "'XML Config: Specified mismatch_penalty integer is invalid.')) trigger=True indel_penalty_raw =", "Config: Specified min_length is not a valid integer.')) trigger =", "return summary_data[2:] ## ## If the argument input_report_file is from", "files specified. 
Cannot continue without pairing!')) sys.exit(2) ## ## Optimise", "', Colour.end, 'XML Config: Genotype Prediction control flag is not", "direction): \"\"\" :param allele_object: :param index_path: :return: \"\"\" ##TODO docstring", "dd = defaultdict(list) for dc in map(recursive_generation, children): for k,", "trigger = True ## ## Config mode check if parsed_arguments.config:", "'XML Config: FW and RV references have identical filenames. Will", "= self.config_dict['trim_flags']['@adapter_flag'] if not (adapter_flag in trim_adapters): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "if not (atypical_flag == 'True' or atypical_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red,", "d[t.tag].update(('@' + k, v) for k, v in t.attrib.items()) if", "is(are) invalid.')) trigger=True unpaired_pairing_penalty = self.config_dict['alignment_flags']['@unpaired_pairing_penalty'] if not unpaired_pairing_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "mutate_list.index(target_fqfile) mutate_list[loc] = altered_path return mutate_list def scrape_summary_data(stage, input_report_file): ##", "len(trim_lines)): if '== Summary ==' in trim_lines[i]: summary_start = i", "isinstance(float(seed_length_extension), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified seed_length_extension", "file.')) trigger = True return trigger def extract_data(input_data_directory): target_files =", "', Colour.end, 'Creating instance run directory.. 
')) mkdir_p(run_dir) ## Inform", "cagstart = '100'; cagend = '100' ccgstart = '1'; ccgend", "individual_indelpen.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified indel_penalty integer(s)", "'XML Config: Specified min_seed_length integer is invalid.')) trigger=True band_width =", "detected in reverse_adapter demultiplexing flag.')) trigger = True reverse_position =", "float.')) trigger = True if not float(error_tolerance) in np.arange(0,1.1,0.01): log.error('{}{}{}{}'.format(Colour.red,", "etree.parse(self.config_filename) ## ## Check config vs dtd, parse info to", "float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified seed_length_extension float", "target in input_list[i]: return i def sanitise_trimming_output(input_object, input_list): if type(input_object)", "try:type_func('java') except NameError: trigger=True try:type_func('fastqc') except NameError: trigger=True try:type_func('cutadapt') except", "'\\033[1m' underline = '\\033[4m' end = '\\033[0m' class ConfigReader(object): \"\"\"", "in trim_lines[i]: summary_start = i ## ## Slice and close", "'XML Config: Specified indel_penalty integer(s) is(are) invalid.')) trigger=True gap_extend_penalty_raw =", "parameter string is empty. 
False indicates the string is NOT", "ValueError exception should be raised if the string is empty.", "True def sanitise_inputs(parsed_arguments): \"\"\" Utilises filesystem_exists_check and check_input_files if either", "'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Genotype Prediction control", "'\\033[94m' green = '\\033[92m' yellow = '\\033[93m' red = '\\033[91m'", "', Colour.end, 'XML Config: Invalid character detected in reverse_adapter demultiplexing", "le.transform(labels) return DataClump(DATA=data, TARGET=hash_int_labels, FTRNAME=feature_names[:-1], DESCR=descr_text, ENCDR=le) def parse_boolean(boolean_value): \"\"\"", "cagend = '200' ccgstart = '1'; ccgend = '20' if", "= output_argument[0] if jobname: target_output = os.path.join(output_root, jobname) if not", "', Colour.end, 'XML Config: Specified trimming adapter not valid selection.'))", "is invalid.')) trigger=True seeded_chain_drop = self.config_dict['alignment_flags']['@seeded_chain_drop'] if not seeded_chain_drop.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "Config: Genotype Prediction control flag is not True/False.')) trigger =", "have identical filenames. 
Will create indexing issue.')) trigger = True", "input_file): if input_file.endswith(input_format): return True return False def initialise_libraries(instance_params): trigger", "'shd__ ', Colour.end, 'Output directories OK!')) return run_dir def replace_fqfile(mutate_list,", "'100'; cagend = '100' ccgstart = '1'; ccgend = '20'", "= '\\033[36m' blue = '\\033[94m' green = '\\033[92m' yellow =", "found.')) trigger = True if not (forward_reference.endswith('.fa') or forward_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red,", "datetime import subprocess import logging as log import numpy as", "Specified data directory could not be found.')) trigger = True", "max_length is not a valid integer.')) trigger = True ##", ":return: \"\"\" ##TODO docstring atypical_path = os.path.join(index_path, '{}{}_{}.xml'.format(direction, label, allele_object.get_reflabel()))", "type='threeprime', flank=tp_flank) for node in [fp_input, cag_region, intervening, ccg_region, cct_region,", "Config: Specified prime_clipping_penalty integer(s) is(are) invalid.')) trigger=True unpaired_pairing_penalty = self.config_dict['alignment_flags']['@unpaired_pairing_penalty']", "check_input_files('.xml',xmlfile): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file is not", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified max_length is not", "## ## Trimming flag settings if sequence_qc_flag == 'True': trimming_type", "_R1. 
', forward_data)) sys.exit(2) ## ## Check reverse ends with", "trpf: trim_lines = trpf.readlines() ## ## Determine buffer size to", "', Colour.end, 'Specified config file is not an XML file.'))", "direction == 'fw': toutfi = open(temp_output, 'w') cat_process = subprocess.Popen(['cat',", "write output.') else: ## Ensures root output is a real", "direction == 'rv': cagstart = '100'; cagend = '100' ccgstart", "stderr=subprocess.PIPE) cat_process.wait() toutfi.close() target_output = temp_output return target_output def seek_target(input_list,", "{} if t.attrib else None} children = list(t) ## ##", "extracts information from the tree into a python dictionary {key:", "not a valid integer.')) trigger = True minimum_length = self.config_dict['demultiplex_flags']['@min_length']", "a string (boolean_value), returns a boolean value representing the string", "= preprocessing.LabelEncoder() le.fit(labels) hash_int_labels = le.transform(labels) return DataClump(DATA=data, TARGET=hash_int_labels, FTRNAME=feature_names[:-1],", "not end in _R1. ', forward_data)) sys.exit(2) ## ## Check", "')) mkdir_p(output_root) run_dir = os.path.join(output_root, 'ScaleHDRun_'+today) log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end,", "True is returned. \"\"\" if string != '': return False", "= True ## ## Instance flag settings demultiplexing_flag = self.config_dict['instance_flags']['@demultiplex']", "trigger = True reverse_adapter = self.config_dict['trim_flags']['@reverse_adapter'] for charbase in reverse_adapter:", "Optimise so code isn't recycled for i in range(0, len(sorted_input),", "Imports import string import os import errno import shutil import", "integer out of range (0-38).')) trigger = True trim_adapters =", "in np.arange(0,1.1,0.01): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified error", "',Colour.end,'I/O: Forward input file does not end in _R1. 
',", "if '-q' in trim_lines[1]: scraping_buffer += 1 ## ## Get", "= ['A', 'G', 'C', 'T'] if demultiplexing_flag == 'True': forward_adapter", "everywhere else in pipeline sample_root = '_'.join(forward_data_name.split('_')[:-1]) instance_path = os.path.join(instance_rundir)", "if alignment_flag == 'True': min_seed_length = self.config_dict['alignment_flags']['@min_seed_length'] if not min_seed_length.isdigit():", "quality_threshold = self.config_dict['trim_flags']['@quality_threshold'] if not quality_threshold.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "if type(input_object) is int: cleanse_target = input_list[input_object].split(':')[1].lstrip().rstrip() return cleanse_target else:", "'': return False if raise_exception: raise ValueError(\"Empty string detected!\") return", "= scriptdir self.config_filename = config_filename self.dtd_filename = scriptdir + \"/config/config.dtd\"", "Observation value invalid! Please use 1-10.')) trigger = True quality_cutoff", "trigger = True maximum_length = self.config_dict['demultiplex_flags']['@max_length'] if not maximum_length ==", "control flag is not True/False.')) trigger = True snpcall_flag =", "sys.exit(2) ## ## Make Stage outputs for use in everywhere", "Stage outputs for use in everywhere else in pipeline sample_root", "argument input_report_file is from alignment.. 
if stage == 'align': with", "label + '_concat.fa') gen_process = subprocess.Popen(['generatr', '-i', input_xml, '-o', target_output],", "## ## Jobname prefix validity check if parsed_arguments.jobname: for character", "Config: Specified quality threshold integer out of range (0-38).')) trigger", "stage): if type(input_object) is int: if stage == 3: cleanse_target", "style, except AttributeError for -b style try: quality_control = instance_params.config_dict['instance_flags']['@quality_control']", "self.config_dict['demultiplex_flags']['@forward_adapter'] for charbase in forward_adapter: if charbase not in trim_adapter_base:", "float is invalid.')) trigger=True skip_seed_with_occurrence = self.config_dict['alignment_flags']['@skip_seed_with_occurrence'] if not skip_seed_with_occurrence.isdigit():", "NOT empty. Parameter raise_exception determines if a ValueError exception should", "through accessing the config_dict variable. \"\"\" def __init__(self, scriptdir, config_filename=None):", "return trigger def extract_data(input_data_directory): target_files = glob.glob(os.path.join(input_data_directory, '*')) for extract_target", "in forward_adapter demultiplexing flag.')) trigger = True forward_position = self.config_dict['demultiplex_flags']['@forward_position']", "Realignment flag is not True/False.')) trigger = True genotype_flag =", "parsed_arguments.jobname: for character in parsed_arguments.jobname: if character is ' '", "purple = '\\033[95m' cyan = '\\033[96m' darkcyan = '\\033[36m' blue", "bold = '\\033[1m' underline = '\\033[4m' end = '\\033[0m' class", "file_count % 2 == 0: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'I/O:", "log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'Output directories OK!')) return run_dir def", "type='repeat_region', order='2', unit='CCG', start=ccgstart, end=ccgend) cct_region = etree.Element('input', type='repeat_region', order='3',", 
"self.config_dict['prediction_flags']['@snp_observation_threshold'] if not snp_observation_pcnt.isdigit(): if not int(snp_observation_pcnt) in range(1,5): log.error('{}{}{}{}'.format(Colour.red,", "'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Demultiplexing flag is", "is a real directory ## Generates folder name based on", "log.info('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Invalid input. Please input Y or", "xml doc, puts through generator, returns dictionary string_repr = etree.tostring(self.config_file,", "reverse reference file could not be found.')) trigger = True", "threshold integer is invalid.')) trigger = True elif not int(quality_threshold)", "not (genotype_flag == 'True' or genotype_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "RV adapter sequence.')) trigger = True error_tolerance = self.config_dict['trim_flags']['@error_tolerance'] if", "= int(temp[1]) data = np.empty((n_samples, n_features)) temp = next(data_file) feature_names", "tidy up report for genotyping ## since we already have", "be okaaaayyyy log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'Output directories OK!')) return", "object that exposes its keys as attributes.\"\"\" def __init__(self, **kwargs):", "= datetime.date.today().strftime('%d-%m-%Y') walltime = datetime.datetime.now().strftime('%H%M%S') today = date + '-'", "tp_input = etree.Element('input', type='threeprime', flank=tp_flank) for node in [fp_input, cag_region,", "Y or N.')) continue else: break if purge_choice.lower() == 'y':", "FTRNAME=feature_names[:-1], DESCR=descr_text, ENCDR=le) def parse_boolean(boolean_value): \"\"\" Given a string (boolean_value),", "= True minimum_length = self.config_dict['demultiplex_flags']['@min_length'] if not minimum_length == '':", "trigger = True reverse_position = self.config_dict['demultiplex_flags']['@reverse_position'] if reverse_position not in", "not a fa/fas file.')) trigger = True if 
forward_reference.split('/')[-1] ==", "= self.config_dict['trim_flags']['@quality_threshold'] if not quality_threshold.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "tree into a python dictionary {key: value}. This dictionary will", "in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Invalid character", "integer.')) trigger = True minimum_overlap = self.config_dict['demultiplex_flags']['@min_overlap'] if not minimum_overlap.isdigit():", "< 1.0.')) trigger = True ## ## Alignment flag settings", "as f: data_file = csv.reader(f) temp = next(data_file) n_samples =", "log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Reverse input file does not end in _R2.", "== 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Sequence Quality", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Sequence Alignment flag is", "purge_choice = input('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Job folder already exists.", "FW and RV references have identical filenames. Will create indexing", "Reverse input file does not end in _R2. 
', reverse_data))", "= '20' if direction == 'rv': cagstart = '100'; cagend", "fa/fas file.')) trigger = True reverse_reference = self.config_dict['@reverse_reference'] if not", "'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP Calling flag", "not isinstance(float(error_tolerance), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Given demultiplexing reverse adapter", "[] binary_string = 'type {}'.format(binary) binary_subprocess = subprocess.Popen([binary_string], shell=True, stdout=subprocess.PIPE,", "'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Sequence Quality control", "data_file_name = self.database with open(data_file_name) as f: data_file = csv.reader(f)", "def parse_boolean(boolean_value): \"\"\" Given a string (boolean_value), returns a boolean", "return True if raise_exception: log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified input path could not", "Parameter raise_exception determines if a ValueError exception should be raised", "forward_data_name = sorted_input[i].split('/')[-1].split('.')[0] if not forward_data_name.endswith('_R1'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Forward input", "class DataClump(dict): \"\"\"Container object for datasets: dictionary-like object that exposes", "found.')) return False def check_input_files(input_format, input_file): if input_file.endswith(input_format): return True", "break for extract_target in target_files: unzipd = subprocess.Popen(['gzip', '-q', '-f',", "its keys as attributes.\"\"\" def __init__(self, **kwargs): dict.__init__(self, kwargs) self.__dict__", "## Loads data set from csv, into objects in preparation", "Cannot write output.') else: ## Ensures root output is a", "'1'; cagend = '200' ccgstart = '1'; ccgend = '20'", "'shd__ ', Colour.end, 'XML Config: 
Demultiplexing flag is not set", "## If validation fails, close the object (memory) and raise", "target_output], stdout=subprocess.PIPE, stderr=subprocess.PIPE) gen_process.wait() ## ## Join typical and atypical", "self.config_dict['alignment_flags']['@seeded_chain_drop'] if not seeded_chain_drop.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "def generate_atypical_xml(label, allele_object, index_path, direction): \"\"\" :param allele_object: :param index_path:", "valid integer.')) trigger = True minimum_length = self.config_dict['demultiplex_flags']['@min_length'] if not", "information from the tree into a python dictionary {key: value}.", "'shd__ ', Colour.end, 'XML Config: Specified min_overlap is not a", "if not individual_gaextend.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "= True maximum_length = self.config_dict['demultiplex_flags']['@max_length'] if not maximum_length == '':", "boolean value representing the string contents. For example, a string", "sorted_input[i+1] ## ## Check forward ends with R1 forward_data_name =", "True if raise_exception: log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified input path could not be", "loci_root.append(node) s = etree.tostring(data_root, pretty_print=True) with open(atypical_path, 'w') as xmlfi:", "self.dtd_filename = scriptdir + \"/config/config.dtd\" ## ## Check for configuration", "\"\"\" The configuration file reader. 
Opens a configuration file, and", "Colour.end, 'XML Config: Specified indel_penalty integer(s) is(are) invalid.')) trigger=True gap_extend_penalty_raw", "on date (for run ident) date = datetime.date.today().strftime('%d-%m-%Y') walltime =", "= ''; ccgend = '' ccglen = allele_object.get_ccg() cctlen =", "instance_params['sequence_alignment'] genotyping = instance_params['genotype_prediction'] snp_calling = instance_params['snp_calling'] if quality_control ==", "self.config_dict['alignment_flags']['@band_width'] if not band_width.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "trigger = True return trigger def extract_data(input_data_directory): target_files = glob.glob(os.path.join(input_data_directory,", "if not mismatch_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "## ## Get input files from data path ## Sort", "', Colour.end, 'XML Config: Specified prime_clipping_penalty integer(s) is(are) invalid.')) trigger=True", "', Colour.end, 'XML Config: Invalid character detected in forward_adapter demultiplexing", "class DataLoader: def __init__(self, database, descriptor): self.database = database self.descriptor", "description file for respective data set modeldescr_name = self.descriptor with", "Config: Invalid character detected in forward_adapter demultiplexing flag.')) trigger =", "not (trimming_type == 'Quality' or trimming_type == 'Adapter' or trimming_type", "except NameError: trigger=True try:type_func('bwa') except NameError: trigger=True try:type_func('samtools') except NameError:", "key if t.attrib: d[t.tag].update(('@' + k, v) for k, v", "Colour.end, 'XML Config: Specified skip_seed_with_occurrence integer is invalid.')) trigger=True chain_drop", "Colour.end, 'Specified config file could not be found.')) trigger =", "output root... 
')) mkdir_p(output_root) run_dir = os.path.join(output_root, 'ScaleHDRun_'+today) log.info('{}{}{}{}'.format(Colour.bold, 'shd__", "is not a valid float.')) trigger = True if not", "= True ## ## Alignment flag settings if alignment_flag ==", "= True ## ## Trimming flag settings if sequence_qc_flag ==", "True quality_cutoff = self.config_dict['prediction_flags']['@quality_cutoff'] if not quality_cutoff.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "instance_params['snp_calling'] if quality_control == 'True': try:type_func('java') except NameError: trigger=True try:type_func('fastqc')", "(fqfile.endswith('.fq') or fqfile.endswith('.fastq') or fqfile.endswith('.fq.gz') or fqfile.endswith('.fastq.gz')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "configuration file, and if valid, converts the parameters within the", "input_report_file is from trimming.. if stage == 'trim': with open(input_report_file,", "or reverse_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified reverse", "Demultiplexing flag settings trim_adapter_base = ['A', 'G', 'C', 'T'] if", "', Colour.end, 'XML Config: Specified max_length is not a valid", "== 'True' or sequence_qc_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "'shd__ ', Colour.end, 'XML Config: Invalid character detected in forward_adapter", "Determine buffer size to slice from above array scraping_buffer =", "be found.')) return False def check_input_files(input_format, input_file): if input_file.endswith(input_format): return", "or fqfile.endswith('.fastq.gz')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Non FastQ/GZ", "= self.config_dict['demultiplex_flags']['@forward_position'] if forward_position not in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red,", "prior='1') ccg_region = etree.Element('input', type='repeat_region', order='2', unit='CCG', start=ccgstart, 
end=ccgend) cct_region", "i in range(0, len(trim_lines)): if '== Summary ==' in trim_lines[i]:", "character detected in forward_adapter demultiplexing flag.')) trigger = True forward_position", "= string.lower(boolean_value) in ('yes', 'y', 'true', 't', '1') return boolean_value", "in binary_result[0] or binary_result[1]: log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Missing binary: ', binary, '!'))", "try: quality_control = instance_params.config_dict['instance_flags']['@quality_control'] alignment = instance_params.config_dict['instance_flags']['@sequence_alignment'] genotyping = instance_params.config_dict['instance_flags']['@genotype_prediction']", "if not (adapter_flag in trim_adapters): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "'': if not maximum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "sequence_qc_flag == 'True': trimming_type = self.config_dict['trim_flags']['@trim_type'] if not (trimming_type ==", "a real directory ## Generates folder name based on date", "fqfile.endswith('.fastq') or fqfile.endswith('.fq.gz') or fqfile.endswith('.fastq.gz')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "screwy on linux input_files = glob.glob(os.path.join(data_path, '*')) sorted_input = sorted(input_files)", "## ## Check config vs dtd, parse info to dictionary,", "if snp_calling == 'True': try: type_func('picard') except NameError: trigger=True try:", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Demultiplexing flag is not", "in FW adapter sequence.')) trigger = True reverse_adapter = self.config_dict['trim_flags']['@reverse_adapter']", "present ## Changed from WHICH as apparently type functions over", "'shd__ ', Colour.end, 'XML Config: Genotype Prediction control flag is", "**kwargs): dict.__init__(self, kwargs) self.__dict__ = self class DataLoader: def __init__(self,", "if a ValueError exception should be raised 
if the string", "Demultiplexing flag is not set to True/False.')) trigger = True", "\"\"\" Given a string (boolean_value), returns a boolean value representing", "or snpcall_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "check to see if the string provided by parameter string", "only skip first line return align_lines[1:] ## ## No need", "for character in parsed_arguments.jobname: if character is ' ' or", "flank=tp_flank) for node in [fp_input, cag_region, intervening, ccg_region, cct_region, tp_input]:", "{0}: {1}\".format(self.config_filename, dtd_object.error_log.filter_from_errors()[0])) sys.exit(2) dtd_file.close() def set_dictionary(self): \"\"\" Takes the", "input data. Extracting!')) break for extract_target in target_files: unzipd =", "and raise_exception is set to True, an IOError is raised", "i in range(0, len(input_list)): if target in input_list[i]: return i", "formatted xml doc, puts through generator, returns dictionary string_repr =", "target_files: unzipd = subprocess.Popen(['gzip', '-q', '-f', '-d', extract_target], stderr=subprocess.PIPE) unzipd.wait()", "or trimming_type == 'Both'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "date = datetime.date.today().strftime('%d-%m-%Y') walltime = datetime.datetime.now().strftime('%H%M%S') today = date +", "',Colour.end,'Specified input path could not be found.')) return False def", "for i in range(0, len(input_list)): if target in input_list[i]: return", "Specified band_width integer is invalid.')) trigger=True seed_length_extension = self.config_dict['alignment_flags']['@seed_length_extension'] if", "scraping_buffer = 8 if '-q' in trim_lines[1]: scraping_buffer += 1", "= '' ccglen = allele_object.get_ccg() cctlen = allele_object.get_cct() tp_flank =", "allele_object, index_path, direction): \"\"\" :param allele_object: :param index_path: :return: \"\"\"", "for checking binaries present ## Changed from WHICH as apparently", 
"not individual_indelpen.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified indel_penalty", "target_output = os.path.join(index_path, label + '.fa') temp_output = os.path.join(index_path, label", "return mutate_list def scrape_summary_data(stage, input_report_file): ## ## If the argument", "Colour.end, 'XML Config: Specified max_length is not a valid integer.'))", "For example, a string with 'true', 't', 'y' or 'yes'", "', Colour.end, 'Creating output root... ')) mkdir_p(output_root) run_dir = os.path.join(output_root,", "make it ## Then make the run directory for datetime", "False and the string is empty, True is returned. \"\"\"", "## ## If the argument input_report_file is from trimming.. if", "of input files specified. Cannot continue without pairing!')) sys.exit(2) ##", "= '100' ccgstart = '1'; ccgend = '20' ## ##", "next(data_file) n_samples = int(temp[0]) n_features = int(temp[1]) data = np.empty((n_samples,", "', Colour.end, 'XML Config: Demultiplexing flag is not set to", "return False def initialise_libraries(instance_params): trigger = False ## ## Subfunction", "trigger = True minimum_length = self.config_dict['demultiplex_flags']['@min_length'] if not minimum_length ==", "(0-38).')) trigger = True trim_adapters = ['-a','-g','-a$','-g^','-b'] adapter_flag = self.config_dict['trim_flags']['@adapter_flag']", "use 1-10.')) trigger = True quality_cutoff = self.config_dict['prediction_flags']['@quality_cutoff'] if not", "exist, make it ## Then make the run directory for", "', Colour.end, 'XML Config: Specified reverse reference file is not", "ccglen = allele_object.get_ccg() cctlen = allele_object.get_cct() tp_flank = 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT' if", "int(quality_threshold) in range(0,39): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "if not os.path.isfile(reverse_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", 
"## ## Check reverse ends with R2 reverse_data_name = sorted_input[i+1].split('/')[-1].split('.')[0]", "map(recursive_generation, children): for k, v in dc.items(): dd[k].append(v) d =", "trimming_type = self.config_dict['trim_flags']['@trim_type'] if not (trimming_type == 'Quality' or trimming_type", "Check config vs dtd, parse info to dictionary, validate vs", "return False def check_input_files(input_format, input_file): if input_file.endswith(input_format): return True return", "if the path, specified by parameter path, exists. Can be", "dtd_object.error_log.filter_from_errors()[0])) sys.exit(2) dtd_file.close() def set_dictionary(self): \"\"\" Takes the now validated", "== 'align': with open(input_report_file, 'r') as alnrpf: align_lines = alnrpf.readlines()", "= False ## ## Subfunction for recycling code ## Calls", "alignment_flag = self.config_dict['instance_flags']['@sequence_alignment'] if not (alignment_flag == 'True' or alignment_flag", "a configuration file, and if valid, converts the parameters within", "situation where instance_params origin differs ## try for -c style,", "Specified skip_seed_with_occurrence integer is invalid.')) trigger=True chain_drop = self.config_dict['alignment_flags']['@chain_drop'] if", "and check_input_files if either return false, path is invalid or", "'-' + walltime ## If the user specified root doesn't", "= ['-a','-g','-a$','-g^','-b'] adapter_flag = self.config_dict['trim_flags']['@adapter_flag'] if not (adapter_flag in trim_adapters):", "raise ValueError(\"Empty string detected!\") return True def sanitise_inputs(parsed_arguments): \"\"\" Utilises", "a valid float.')) trigger = True if not float(error_tolerance) in", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP Calling flag is", "', Colour.end, 'XML Config: Trimming type is not Quality/Adapter/Both.')) trigger", "k, v) for k, v in t.attrib.items()) if t.text: text", "## ## Slice and close summary_data = 
trim_lines[summary_start:summary_start + scraping_buffer]", "\"\"\" Method which validates the configuration file's contents. If all", "R2 reverse_data_name = sorted_input[i+1].split('/')[-1].split('.')[0] if not reverse_data_name.endswith('_R2'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Reverse", "continue without pairing!')) sys.exit(2) ## ## Optimise so code isn't", "[fp_input, cag_region, intervening, ccg_region, cct_region, tp_input]: loci_root.append(node) s = etree.tostring(data_root,", "structure \"\"\" ## ## Open > etree.DTD object dtd_file =", "= d[:-1] label = d[-1] labels.append(label) le = preprocessing.LabelEncoder() le.fit(labels)", "subprocess.Popen(['gzip', '-q', '-f', '-d', extract_target], stderr=subprocess.PIPE) unzipd.wait() return True def", "prefix validity check if parsed_arguments.jobname: for character in parsed_arguments.jobname: if", "genotyping = instance_params.config_dict['instance_flags']['@genotype_prediction'] snp_calling = instance_params.config_dict['instance_flags']['@snp_calling'] except AttributeError: quality_control =", "'True' or alignment_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "input_list[i]: return i def sanitise_trimming_output(input_object, input_list): if type(input_object) is int:", "os.path.join(instance_rundir, sample_root, 'Align') predict_path = os.path.join(instance_rundir, sample_root, 'Predict') file_pair[sample_root] =", "', Colour.end, 'Job folder already exists. Delete existing folder? 
Y/N:", "i ## ## Slice and close summary_data = trim_lines[summary_start:summary_start +", "reverse_data = sorted_input[i+1] ## ## Check forward ends with R1", "differs ## try for -c style, except AttributeError for -b", "os.path.join(output_root, jobname) if os.path.exists(run_dir): shutil.rmtree(run_dir, ignore_errors=True) mkdir_p(run_dir) else: raise Exception('User", "in trim_adapters): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified trimming", "minimum_length = self.config_dict['demultiplex_flags']['@min_length'] if not minimum_length == '': if not", "Checks to see if the path, specified by parameter path,", "puts through generator, returns dictionary string_repr = etree.tostring(self.config_file, pretty_print=True) element_tree", "alignment = instance_params.config_dict['instance_flags']['@sequence_alignment'] genotyping = instance_params.config_dict['instance_flags']['@genotype_prediction'] snp_calling = instance_params.config_dict['instance_flags']['@snp_calling'] except", "cag_region, intervening, ccg_region, cct_region, tp_input]: loci_root.append(node) s = etree.tostring(data_root, pretty_print=True)", "= self.config_dict['trim_flags']['@reverse_adapter'] for charbase in reverse_adapter: if charbase not in", "config file is not an XML file.')) trigger = True", "if charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "start=cagstart, end=cagend) intervening = etree.Element('input', type='intervening', sequence=intv, prior='1') ccg_region =", "see if the string provided by parameter string is empty.", "Colour.end, 'XML Config: Specified error tolerance is not 0.0 <", "subprocess.Popen(['generatr', '-i', input_xml, '-o', target_output], stdout=subprocess.PIPE, stderr=subprocess.PIPE) gen_process.wait() ## ##", "Slice and close summary_data = trim_lines[summary_start:summary_start + scraping_buffer] trpf.close() return", "Colour.end, 'XML Config: Invalid character 
detected in RV adapter sequence.'))", "indel_penalty integer(s) is(are) invalid.')) trigger=True gap_extend_penalty_raw = self.config_dict['alignment_flags']['@gap_extend_penalty'] gap_extend_penalty =", "successful!')) class DataClump(dict): \"\"\"Container object for datasets: dictionary-like object that", "accessing the config_dict variable. \"\"\" def __init__(self, scriptdir, config_filename=None): ##", "ignore_errors=True) mkdir_p(run_dir) else: raise Exception('User chose not to delete pre-existing", "Config: Specified max_length is not a valid integer.')) trigger =", "__init__(self, database, descriptor): self.database = database self.descriptor = descriptor def", "Quality/Adapter/Both.')) trigger = True quality_threshold = self.config_dict['trim_flags']['@quality_threshold'] if not quality_threshold.isdigit():", "If all pass, guarantees that the settings dictionary is full", "data = np.empty((n_samples, n_features)) temp = next(data_file) feature_names = np.array(temp)", "Colour.end, 'XML Config: Specified reverse reference file could not be", "def sanitise_trimming_output(input_object, input_list): if type(input_object) is int: cleanse_target = input_list[input_object].split(':')[1].lstrip().rstrip()", "'shd__ ', Colour.end, 'XML Config: SNP Observation value invalid! Please", "the tree into a python dictionary {key: value}. This dictionary", "valid settings! 
\"\"\" trigger = False ## ## Main configuration", "'True': try: type_func('picard') except NameError: trigger=True try: type_func('freebayes') except NameError:", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Given demultiplexing forward adapter", "in reverse_adapter: if charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "True/False.')) trigger = True atypical_flag = self.config_dict['instance_flags']['@atypical_realignment'] if not (atypical_flag", "Join typical and atypical reference into one file if direction", "etree.DTD(dtd_file) ## ## If validation fails, close the object (memory)", "forward reference file is not a fa/fas file.')) trigger =", "dtd, parse info to dictionary, validate vs ruleset self.validate_against_dtd() self.set_dictionary()", "intv = allele_object.get_intervening() ccgstart = ''; ccgend = '' ccglen", "True ## ## Instance flag settings demultiplexing_flag = self.config_dict['instance_flags']['@demultiplex'] if", "determines if a ValueError exception should be raised if the", "\"\"\" Checks to see if the path, specified by parameter", "used for variables within the pipeline. 
Recursion adapted from http://stackoverflow.com/a/9286702", "self.config_dict['instance_flags']['@atypical_realignment'] if not (atypical_flag == 'True' or atypical_flag == 'False'):", "if not (alignment_flag == 'True' or alignment_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red,", "try: type_func('picard') except NameError: trigger=True try: type_func('freebayes') except NameError: trigger=True", "from lxml import etree from reportlab.pdfgen import canvas class Colour:", "def __init__(self, **kwargs): dict.__init__(self, kwargs) self.__dict__ = self class DataLoader:", "s = etree.tostring(data_root, pretty_print=True) with open(atypical_path, 'w') as xmlfi: xmlfi.write(s.decode())", "if text: d[t.tag]['#text'] = text else: d[t.tag] = text return", "## ## Main configuration instance settings data_directory = self.config_dict['@data_dir'] if", "== 'True' or snpcall_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "in ['5P', '3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "Config: Specified gap_extend_penalty integer(s) is(are) invalid.')) trigger=True prime_clipping_penalty_raw = self.config_dict['alignment_flags']['@prime_clipping_penalty']", "from data path ## Sort so that ordering isn't screwy", "sequence_pairs def filesystem_exists_check(path, raise_exception=True): \"\"\" Checks to see if the", "use in everywhere else in pipeline sample_root = '_'.join(forward_data_name.split('_')[:-1]) instance_path", "validate vs ruleset self.validate_against_dtd() self.set_dictionary() self.validate_config() def validate_against_dtd(self): \"\"\" Validate", "forward_reference.split('/')[-1] == reverse_reference.split('/')[-1]: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: FW", "'Creating Output with prefix: ', jobname)) run_dir = os.path.join(output_root, jobname)", "min_length is not a valid integer.')) trigger = True maximum_length", "band_width.isdigit(): 
log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified band_width integer", "d[t.tag] = text return d ## ## Takes the formatted", "if the string is empty. If raise_exception is False and", "is empty. If raise_exception is False and the string is", "pass def generate_atypical_xml(label, allele_object, index_path, direction): \"\"\" :param allele_object: :param", "'fw': cagstart = '1'; cagend = '200' ccgstart = '1';", "file is not an XML file.')) trigger = True return", "else: d[t.tag] = text return d ## ## Takes the", "check_input_files if either return false, path is invalid or unsupported", "the string is empty. If raise_exception is False and the", "## Jobname prefix validity check if parsed_arguments.jobname: for character in", "def sanitise_alignment_output(input_object, input_list, stage): if type(input_object) is int: if stage", "not os.path.exists(data_directory): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified data", "Colour.end, 'Creating instance run directory.. 
')) mkdir_p(run_dir) ## Inform user", "True quality_threshold = self.config_dict['trim_flags']['@quality_threshold'] if not quality_threshold.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "input files from data path ## Sort so that ordering", "specified!\") else: self.config_file = etree.parse(self.config_filename) ## ## Check config vs", "not (fqfile.endswith('.fq') or fqfile.endswith('.fastq') or fqfile.endswith('.fq.gz') or fqfile.endswith('.fastq.gz')): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "exc: if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise", "not quality_cutoff.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP Quality", "mkdir_p(run_dir) else: raise Exception('User chose not to delete pre-existing Job", "be found.')) trigger = True if not (forward_reference.endswith('.fa') or forward_reference.endswith('.fasta')):", "f: descr_text = f.read() ## Loads data set from csv,", "self.validate_config() def validate_against_dtd(self): \"\"\" Validate input config against DTD ruleset", "not forward_data_name.endswith('_R1'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Forward input file does not end", "'': if not minimum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "returns a boolean value representing the string contents. For example,", "+= 1 ## ## Get Anchor summary_start = 0 for", "np.arange(0,1.1,0.01): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified error tolerance", "Specified chain_drop float is invalid.')) trigger=True seeded_chain_drop = self.config_dict['alignment_flags']['@seeded_chain_drop'] if", "else v for k, v in dd.items()}} ## ## Values", "snp_calling == 'True': try: type_func('picard') except NameError: trigger=True try: type_func('freebayes')", "file could not be found.')) trigger = True if not", "path, specified by parameter path, exists. 
Can be either a", "= self.config_dict['instance_flags']['@quality_control'] if not (sequence_qc_flag == 'True' or sequence_qc_flag ==", "Specified indel_penalty integer(s) is(are) invalid.')) trigger=True gap_extend_penalty_raw = self.config_dict['alignment_flags']['@gap_extend_penalty'] gap_extend_penalty", "Colour.end, 'XML Config: Specified quality threshold integer is invalid.')) trigger", "for xmlfile in parsed_arguments.config: if not check_input_files('.xml',xmlfile): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "for bunch() data_file_name = self.database with open(data_file_name) as f: data_file", "data detected in specified input directory.')) trigger = True forward_reference", "mkdir_p(output_root) run_dir = os.path.join(output_root, 'ScaleHDRun_'+today) log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating", "'True' or demultiplexing_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "type='fiveprime', flank=fp_flank) cag_region = etree.Element('input', type='repeat_region', order='1', unit='CAG', start=cagstart, end=cagend)", "is invalid.')) trigger=True seq_match_score = self.config_dict['alignment_flags']['@seq_match_score'] if not seq_match_score.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "\"\"\" ## ## Open > etree.DTD object dtd_file = open(self.dtd_filename,", "input config against DTD ruleset i.e. 
confirms conformation of XML", "True return trigger def extract_data(input_data_directory): target_files = glob.glob(os.path.join(input_data_directory, '*')) for", "Cutoff value is not an integer.')) trigger = True if", "gap_extend_penalty = gap_extend_penalty_raw.split(',') for individual_gaextend in gap_extend_penalty: if not individual_gaextend.isdigit():", "mkdir_p(run_dir) else: purge_choice = '' while True: purge_choice = input('{}{}{}{}'.format(Colour.bold,", "trigger=True unpaired_pairing_penalty = self.config_dict['alignment_flags']['@unpaired_pairing_penalty'] if not unpaired_pairing_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "len(input_list)): if target in input_list[i]: return i def sanitise_trimming_output(input_object, input_list):", "= self.config_dict['demultiplex_flags']['@max_length'] if not maximum_length == '': if not maximum_length.isdigit():", "## To determine which binaries to check for ## AttributeError", "or sequence_qc_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "is invalid.')) trigger = True elif not int(quality_threshold) in range(0,39):", "be found.')) trigger = True if not (reverse_reference.endswith('fa') or reverse_reference.endswith('.fasta')):", "if not min_seed_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "= self.descriptor with open(modeldescr_name) as f: descr_text = f.read() ##", "trigger=True chain_drop = self.config_dict['alignment_flags']['@chain_drop'] if not isinstance(float(chain_drop), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "Colour.end, 'XML Config: Specified error tolerance is not a valid", "= instance_params.config_dict['instance_flags']['@sequence_alignment'] genotyping = instance_params.config_dict['instance_flags']['@genotype_prediction'] snp_calling = instance_params.config_dict['instance_flags']['@snp_calling'] except AttributeError:", "## Check config vs dtd, 
parse info to dictionary, validate", "end = '\\033[0m' class ConfigReader(object): \"\"\" The configuration file reader.", "invalid! [5P, 3P, AP]')) trigger = True reverse_adapter = self.config_dict['demultiplex_flags']['@reverse_adapter']", "## Make Stage outputs for use in everywhere else in", "', Colour.end, 'I/O: Non-even number of input files specified. Cannot", "does not end in _R2. ', reverse_data)) sys.exit(2) ## ##", "sequence_pairs.append(file_pair) return sequence_pairs def filesystem_exists_check(path, raise_exception=True): \"\"\" Checks to see", ":param allele_object: :param index_path: :return: \"\"\" ##TODO docstring atypical_path =", "in RV adapter sequence.')) trigger = True error_tolerance = self.config_dict['trim_flags']['@error_tolerance']", "'XML Config: Given demultiplexing forward adapter position invalid! [5P, 3P,", "Config: Specified quality threshold integer is invalid.')) trigger = True", "= self.config_dict['alignment_flags']['@indel_penalty'] indel_penalty = indel_penalty_raw.split(',') for individual_indelpen in indel_penalty: if", "target_output = temp_output return target_output def seek_target(input_list, target): for i", "within the pipeline. Recursion adapted from http://stackoverflow.com/a/9286702 \"\"\" def recursive_generation(t):", "1 ## ## Get Anchor summary_start = 0 for i", "np.array(temp) labels = [] for i, d in enumerate(data_file): data[i]", "Config: Specified band_width integer is invalid.')) trigger=True seed_length_extension = self.config_dict['alignment_flags']['@seed_length_extension']", "start=str(cctlen), end=str(cctlen)) tp_input = etree.Element('input', type='threeprime', flank=tp_flank) for node in", "= os.path.join(instance_rundir) seq_qc_path = os.path.join(instance_rundir, sample_root, 'SeqQC') align_path = os.path.join(instance_rundir,", "we already have the data from our own objects if", "folder. 
Cannot write output.') else: ## Ensures root output is", "string != '': return False if raise_exception: raise ValueError(\"Empty string", "etree.Element('input', type='intervening', sequence=intv, prior='1') ccg_region = etree.Element('input', type='repeat_region', order='2', unit='CCG',", "minimum_length == '': if not minimum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "def filesystem_exists_check(path, raise_exception=True): \"\"\" Checks to see if the path,", "Loads data set from csv, into objects in preparation for", "temp_output = os.path.join(index_path, label + '_concat.fa') gen_process = subprocess.Popen(['generatr', '-i',", "## ## To determine which binaries to check for ##", "{1}\".format(self.config_filename, dtd_object.error_log.filter_from_errors()[0])) sys.exit(2) dtd_file.close() def set_dictionary(self): \"\"\" Takes the now", "the argument input_report_file is from alignment.. if stage == 'align':", "flag.')) trigger = True forward_position = self.config_dict['demultiplex_flags']['@forward_position'] if forward_position not", "list(t) ## ## If list was populated, create dictionary, Append", "Config: Specified trimming adapter not valid selection.')) trigger = True", "if not individual_indelpen.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "that exposes its keys as attributes.\"\"\" def __init__(self, **kwargs): dict.__init__(self,", "summary_data = trim_lines[summary_start:summary_start + scraping_buffer] trpf.close() return summary_data[2:] ## ##", "the pipeline. 
Recursion adapted from http://stackoverflow.com/a/9286702 \"\"\" def recursive_generation(t): d", "within the file to a dictionary object, reader to be", "output is a real directory ## Generates folder name based", "trigger=True gap_extend_penalty_raw = self.config_dict['alignment_flags']['@gap_extend_penalty'] gap_extend_penalty = gap_extend_penalty_raw.split(',') for individual_gaextend in", "text return d ## ## Takes the formatted xml doc,", "'y') and not (purge_choice.lower() == 'n'): log.info('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "adapter sequence.')) trigger = True reverse_adapter = self.config_dict['trim_flags']['@reverse_adapter'] for charbase", "pipeline sample_root = '_'.join(forward_data_name.split('_')[:-1]) instance_path = os.path.join(instance_rundir) seq_qc_path = os.path.join(instance_rundir,", "Quality Cutoff value is not an integer.')) trigger = True", "'shd__ ', Colour.end, 'XML Config: Sequence Alignment flag is not", "pairing!')) sys.exit(2) ## ## Optimise so code isn't recycled for", "NameError: trigger=True try:type_func('fastqc') except NameError: trigger=True try:type_func('cutadapt') except NameError: trigger=True", "'shd__ ', Colour.end, 'Detected compressed input data. Extracting!')) break for", "seeded_chain_drop.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified seeded_chain_drop integer", "', Colour.end, 'Specified config file could not be found.')) trigger", "not os.path.isfile(forward_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified forward", "'XML Config: Invalid character detected in RV adapter sequence.')) trigger", "labels = [] for i, d in enumerate(data_file): data[i] =", "= os.path.join(instance_rundir, sample_root, 'SeqQC') align_path = os.path.join(instance_rundir, sample_root, 'Align') predict_path", "Colour.end, 'Job folder already exists. Delete existing folder? 
Y/N: '))", "ref_indexes[0]], stdout=toutfi, stderr=subprocess.PIPE) cat_process.wait() toutfi.close() target_output = temp_output return target_output", "= text return d ## ## Takes the formatted xml", "to see if the string provided by parameter string is", "True if forward_reference.split('/')[-1] == reverse_reference.split('/')[-1]: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "else: log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'XML Config: Parsing parameters successful!'))", "not a fa/fas file.')) trigger = True reverse_reference = self.config_dict['@reverse_reference']", "generate_atypical_xml(label, allele_object, index_path, direction): \"\"\" :param allele_object: :param index_path: :return:", "= True reverse_adapter = self.config_dict['trim_flags']['@reverse_adapter'] for charbase in reverse_adapter: if", "mutate_list def scrape_summary_data(stage, input_report_file): ## ## If the argument input_report_file", "sys.exit(2) else: log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'XML Config: Parsing parameters", "', binary, '!')) raise NameError ## ## To determine which", "'' intv = allele_object.get_intervening() ccgstart = ''; ccgend = ''", "## Imports import string import os import errno import shutil", "chain_drop float is invalid.')) trigger=True seeded_chain_drop = self.config_dict['alignment_flags']['@seeded_chain_drop'] if not", "flag settings trim_adapter_base = ['A', 'G', 'C', 'T'] if demultiplexing_flag", "validity check if parsed_arguments.jobname: for character in parsed_arguments.jobname: if character", "[] file_count = len(sorted_input) if not file_count % 2 ==", "determine which binaries to check for ## AttributeError in the", "input_xml.split('/')[-1].split('.')[0] target_output = os.path.join(index_path, label + '.fa') temp_output = os.path.join(index_path,", "NameError: trigger=True try:type_func('generatr') except NameError: trigger=True if snp_calling == 'True':", "if t.text: text = 
t.text.strip() if children or t.attrib: if", "trigger = True for xmlfile in parsed_arguments.config: if not check_input_files('.xml',xmlfile):", "Colour.end, 'XML Config: Sequence Quality control flag is not set", "= self.config_dict['alignment_flags']['@chain_drop'] if not isinstance(float(chain_drop), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "##TODO docstring label = input_xml.split('/')[-1].split('.')[0] target_output = os.path.join(index_path, label +", "boolean_value = string.lower(boolean_value) in ('yes', 'y', 'true', 't', '1') return", "range(0, len(sorted_input), 2): file_pair = {} forward_data = sorted_input[i] reverse_data", "'True': trimming_type = self.config_dict['trim_flags']['@trim_type'] if not (trimming_type == 'Quality' or", "False def check_input_files(input_format, input_file): if input_file.endswith(input_format): return True return False", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified unpaired_pairing_penalty integer is", "trigger=True indel_penalty_raw = self.config_dict['alignment_flags']['@indel_penalty'] indel_penalty = indel_penalty_raw.split(',') for individual_indelpen in", "config_filename=None): ## ## Instance variables self.scriptdir = scriptdir self.config_filename =", "not maximum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified max_length", "parsed_arguments.config: if not check_input_files('.xml',xmlfile): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config", "stdout=subprocess.PIPE, stderr=subprocess.PIPE) gen_process.wait() ## ## Join typical and atypical reference", "-b style try: quality_control = instance_params.config_dict['instance_flags']['@quality_control'] alignment = instance_params.config_dict['instance_flags']['@sequence_alignment'] genotyping", "folder? 
Y/N: ')) if not (purge_choice.lower() == 'y') and not", "Specified min_length is not a valid integer.')) trigger = True", "Loci Nodes fp_input = etree.Element('input', type='fiveprime', flank=fp_flank) cag_region = etree.Element('input',", "= gap_extend_penalty_raw.split(',') for individual_gaextend in gap_extend_penalty: if not individual_gaextend.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "'C', 'T'] if demultiplexing_flag == 'True': forward_adapter = self.config_dict['demultiplex_flags']['@forward_adapter'] for", "exiting.')) sys.exit(2) else: log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'XML Config: Parsing", "= '\\033[93m' red = '\\033[91m' bold = '\\033[1m' underline =", "empty, True is returned. \"\"\" if string != '': return", "string detected!\") return True def sanitise_inputs(parsed_arguments): \"\"\" Utilises filesystem_exists_check and", "descr_text = f.read() ## Loads data set from csv, into", "if target in input_list[i]: return i def sanitise_trimming_output(input_object, input_list): if", "not Quality/Adapter/Both.')) trigger = True quality_threshold = self.config_dict['trim_flags']['@quality_threshold'] if not", "except NameError: trigger=True try:type_func('samtools') except NameError: trigger=True try:type_func('generatr') except NameError:", "= '\\033[95m' cyan = '\\033[96m' darkcyan = '\\033[36m' blue =", "cagend = '100' ccgstart = '1'; ccgend = '20' ##", "validated XML and extracts information from the tree into a", "Values for key if t.attrib: d[t.tag].update(('@' + k, v) for", "end in _R2. 
', reverse_data)) sys.exit(2) ## ## Make Stage", "== 'y': log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Clearing pre-existing Jobname Prefix:", "if not int(snp_observation_pcnt) in range(1,5): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "', Colour.end, 'XML Config: Specified mismatch_penalty integer is invalid.')) trigger=True", "import glob import datetime import subprocess import logging as log", "reader. Opens a configuration file, and if valid, converts the", "if forward_reference.split('/')[-1] == reverse_reference.split('/')[-1]: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "True/False.')) trigger = True ## ## Demultiplexing flag settings trim_adapter_base", "integer.')) trigger = True ## ## Trimming flag settings if", "'shd__ ', Colour.end, 'XML Config: Specified min_seed_length integer is invalid.'))", "'20' ## ## Create XML data_root = etree.Element('data') loci_root =", "n_features = int(temp[1]) data = np.empty((n_samples, n_features)) temp = next(data_file)", "tp_input]: loci_root.append(node) s = etree.tostring(data_root, pretty_print=True) with open(atypical_path, 'w') as", "= [] binary_string = 'type {}'.format(binary) binary_subprocess = subprocess.Popen([binary_string], shell=True,", "Colour.end, 'XML Config: Specified unpaired_pairing_penalty integer is invalid.')) trigger=True ##", "temp = next(data_file) n_samples = int(temp[0]) n_features = int(temp[1]) data", "Colour.end, 'Creating Output with prefix: ', jobname)) run_dir = os.path.join(output_root,", "adapter position invalid! 
[5P, 3P, AP]')) trigger = True reverse_adapter", "min_seed_length = self.config_dict['alignment_flags']['@min_seed_length'] if not min_seed_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "= etree.parse(self.config_filename) ## ## Check config vs dtd, parse info", "'True' or snpcall_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "a ValueError exception should be raised if the string is", "os.path.join(index_path, label + '.fa') temp_output = os.path.join(index_path, label + '_concat.fa')", "= list(t) ## ## If list was populated, create dictionary,", "binary_string = 'type {}'.format(binary) binary_subprocess = subprocess.Popen([binary_string], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)", "= input_list[input_object].split(':')[1].lstrip().rstrip() return cleanse_target else: return '*' def sanitise_alignment_output(input_object, input_list,", "else: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:2] return ' '.join(cleanse_target) else: return", "log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating output root... 
')) mkdir_p(output_root) run_dir", "glob.glob(os.path.join(data_directory, '*')): if not (fqfile.endswith('.fq') or fqfile.endswith('.fastq') or fqfile.endswith('.fq.gz') or", "not minimum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified min_length", "self.__dict__ = self class DataLoader: def __init__(self, database, descriptor): self.database", "code ## Calls UNIX type for checking binaries present ##", "== 'True': try: type_func('picard') except NameError: trigger=True try: type_func('freebayes') except", "Create XML data_root = etree.Element('data') loci_root = etree.Element('loci', label=allele_object.get_reflabel()); data_root.append(loci_root)", "instance_params['genotype_prediction'] snp_calling = instance_params['snp_calling'] if quality_control == 'True': try:type_func('java') except", "if extract_target.lower().endswith(('.fq.gz', '.fastq.gz')): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Detected compressed input", "NameError: trigger=True if snp_calling == 'True': try: type_func('picard') except NameError:", "a valid integer.')) trigger = True minimum_length = self.config_dict['demultiplex_flags']['@min_length'] if", "## Calls UNIX type for checking binaries present ## Changed", "now validated XML and extracts information from the tree into", "log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'XML Config: Parsing parameters successful!')) class", "' or character is '/': log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified Job Name has", "could not be found.')) trigger = True if not (forward_reference.endswith('.fa')", "= os.path.join(output_root, 'ScaleHDRun_'+today) log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating instance run", "quit \"\"\" trigger = False ## ## Jobname prefix validity", "Specified seed_length_extension float is invalid.')) trigger=True skip_seed_with_occurrence = 
self.config_dict['alignment_flags']['@skip_seed_with_occurrence'] if", "input_list[input_object].lstrip().rstrip().split(' ')[0:1] return ''.join(cleanse_target) else: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:2] return", "= os.path.join(instance_rundir, sample_root, 'Predict') file_pair[sample_root] = [forward_data, reverse_data, instance_path, seq_qc_path,", "if not float(error_tolerance) in np.arange(0,1.1,0.01): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "'shd__ ', Colour.end, 'XML Config: Specified unpaired_pairing_penalty integer is invalid.'))", "else: raise Exception('User chose not to delete pre-existing Job folder.", "NameError ## ## To determine which binaries to check for", "+ scraping_buffer] trpf.close() return summary_data[2:] ## ## If the argument", "sklearn import preprocessing from collections import defaultdict from xml.etree import", "'XML Config: Invalid character detected in forward_adapter demultiplexing flag.')) trigger", "'shd__ ', Colour.end, 'XML Config: Non FastQ/GZ data detected in", "set to True/False.')) trigger = True sequence_qc_flag = self.config_dict['instance_flags']['@quality_control'] if", "import cElementTree from lxml import etree from reportlab.pdfgen import canvas", "a fa/fas file.')) trigger = True reverse_reference = self.config_dict['@reverse_reference'] if", "Colour.end, 'XML Config: Specified trimming adapter not valid selection.')) trigger", "'XML Config: Failure, exiting.')) sys.exit(2) else: log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end,", "= True forward_adapter = self.config_dict['trim_flags']['@forward_adapter'] for charbase in forward_adapter: if", "shutil.rmtree(run_dir, ignore_errors=True) mkdir_p(run_dir) else: raise Exception('User chose not to delete", "= binary_subprocess.communicate() binary_subprocess.wait() if 'not found'.encode() in binary_result[0] or binary_result[1]:", "v[0] if len(v) == 1 else v for k, v", "return d ## ## 
Takes the formatted xml doc, puts", "\"\"\" boolean_value = string.lower(boolean_value) in ('yes', 'y', 'true', 't', '1')", "the file to a dictionary object, reader to be viewed", "Config: Sequence Quality control flag is not set to True/False.'))", "cctlen = allele_object.get_cct() tp_flank = 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT' if direction == 'fw':", "not (atypical_flag == 'True' or atypical_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "respective data set modeldescr_name = self.descriptor with open(modeldescr_name) as f:", "toutfi = open(temp_output, 'w') cat_process = subprocess.Popen(['cat', target_output, ref_indexes[0]], stdout=toutfi,", "file is not a fa/fas file.')) trigger = True if", "run directory.. ')) mkdir_p(run_dir) ## Inform user it's all gonna", "_R2. ', reverse_data)) sys.exit(2) ## ## Make Stage outputs for", "## ## Imports import string import os import errno import", "contents. If all pass, guarantees that the settings dictionary is", "'XML Config: Non FastQ/GZ data detected in specified input directory.'))", "gap_extend_penalty: if not individual_gaextend.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "self.descriptor = descriptor def load_model(self): ## Loads description file for", "'shd__ ', Colour.end, 'XML Config: Specified data directory could not", "run_dir = os.path.join(output_root, 'ScaleHDRun_'+today) log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating instance", "in pipeline sample_root = '_'.join(forward_data_name.split('_')[:-1]) instance_path = os.path.join(instance_rundir) seq_qc_path =", "= self.config_dict['instance_flags']['@genotype_prediction'] if not (genotype_flag == 'True' or genotype_flag ==", "the string is empty, True is returned. 
\"\"\" if string", "= [forward_data, reverse_data, instance_path, seq_qc_path, align_path, predict_path] sequence_pairs.append(file_pair) return sequence_pairs", "= True if not (reverse_reference.endswith('fa') or reverse_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "cct_region, tp_input]: loci_root.append(node) s = etree.tostring(data_root, pretty_print=True) with open(atypical_path, 'w')", "## ## Check for configuration file (just incase) if self.config_filename", "## ## Instance flag settings demultiplexing_flag = self.config_dict['instance_flags']['@demultiplex'] if not", "(boolean_value), returns a boolean value representing the string contents. For", "FW adapter sequence.')) trigger = True reverse_adapter = self.config_dict['trim_flags']['@reverse_adapter'] for", "chain_drop = self.config_dict['alignment_flags']['@chain_drop'] if not isinstance(float(chain_drop), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "Specified reverse reference file is not a fa/fas file.')) trigger", "mkdir_p(run_dir) ## Inform user it's all gonna be okaaaayyyy log.info('{}{}{}{}'.format(Colour.green,", "in preparation for bunch() data_file_name = self.database with open(data_file_name) as", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'I/O: Non-even number of input files", "configuration file specified!\") else: self.config_file = etree.parse(self.config_filename) ## ## Check", "if stage == 'gtype': pass def generate_atypical_xml(label, allele_object, index_path, direction):", "'XML Config: Specified unpaired_pairing_penalty integer is invalid.')) trigger=True ## ##", "try:type_func('generatr') except NameError: trigger=True if snp_calling == 'True': try: type_func('picard')", "'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Given demultiplexing reverse", "children: dd = defaultdict(list) for dc in map(recursive_generation, children): for", "\"\"\" def __init__(self, scriptdir, 
config_filename=None): ## ## Instance variables self.scriptdir", "## Instance flag settings demultiplexing_flag = self.config_dict['instance_flags']['@demultiplex'] if not (demultiplexing_flag", "alignment = instance_params['sequence_alignment'] genotyping = instance_params['genotype_prediction'] snp_calling = instance_params['snp_calling'] if", "summary_start = 0 for i in range(0, len(trim_lines)): if '==", "since we already have the data from our own objects", "minimum_overlap = self.config_dict['demultiplex_flags']['@min_overlap'] if not minimum_overlap.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "or file. If the path exists, True is returned. If", "def sanitise_outputs(jobname, output_argument): run_dir = '' output_root = output_argument[0] if", "for charbase in reverse_adapter: if charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red,", "if direction == 'rv': cagstart = '100'; cagend = '100'", "DESCR=descr_text, ENCDR=le) def parse_boolean(boolean_value): \"\"\" Given a string (boolean_value), returns", "size to slice from above array scraping_buffer = 8 if", "if not reverse_data_name.endswith('_R2'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Reverse input file does not", "## Get Anchor summary_start = 0 for i in range(0,", "target_output = os.path.join(output_root, jobname) if not os.path.exists(target_output): log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ',", "value invalid! Please use 1-10.')) trigger = True quality_cutoff =", "except NameError: trigger=True try:type_func('generatr') except NameError: trigger=True if snp_calling ==", "representing the string contents. 
For example, a string with 'true',", "continue else: break if purge_choice.lower() == 'y': log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ',", "load_model(self): ## Loads description file for respective data set modeldescr_name", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP Quality Cutoff value", "typical and atypical reference into one file if direction ==", "= True forward_position = self.config_dict['demultiplex_flags']['@forward_position'] if forward_position not in ['5P',", "demultiplexing flag.')) trigger = True forward_position = self.config_dict['demultiplex_flags']['@forward_position'] if forward_position", "trigger = True error_tolerance = self.config_dict['trim_flags']['@error_tolerance'] if not isinstance(float(error_tolerance), float):", "input_list, stage): if type(input_object) is int: if stage == 3:", "data directory could not be found.')) trigger = True for", "altered_path): if target_fqfile in mutate_list: loc = mutate_list.index(target_fqfile) mutate_list[loc] =", "import os import errno import shutil import sys import glob", "cat_process = subprocess.Popen(['cat', target_output, ref_indexes[0]], stdout=toutfi, stderr=subprocess.PIPE) cat_process.wait() toutfi.close() target_output", "individual_indelpen in indel_penalty: if not individual_indelpen.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "## Check reverse ends with R2 reverse_data_name = sorted_input[i+1].split('/')[-1].split('.')[0] if", "= sorted_input[i+1].split('/')[-1].split('.')[0] if not reverse_data_name.endswith('_R2'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Reverse input file", "to check for ## AttributeError in the situation where instance_params", "for individual_gaextend in gap_extend_penalty: if not individual_gaextend.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "\"\"\" if string != '': return False if raise_exception: raise", "trigger=True try:type_func('samtools') 
except NameError: trigger=True try:type_func('generatr') except NameError: trigger=True if", "'\\033[96m' darkcyan = '\\033[36m' blue = '\\033[94m' green = '\\033[92m'", "IOError is raised - else False is returned. \"\"\" if", "the string contents. For example, a string with 'true', 't',", "not to delete pre-existing Job folder. Cannot write output.') else:", "present so, quit \"\"\" trigger = False ## ## Jobname", "if genotype_flag == 'True': snp_observation_pcnt = self.config_dict['prediction_flags']['@snp_observation_threshold'] if not snp_observation_pcnt.isdigit():", "isn't screwy on linux input_files = glob.glob(os.path.join(data_path, '*')) sorted_input =", "io import StringIO import PyPDF2 from sklearn import preprocessing from", "tolerance is not 0.0 < x < 1.0.')) trigger =", "reverse adapter position invalid! [5P, 3P, AP]')) trigger = True", "Invalid character detected in FW adapter sequence.')) trigger = True", "import datetime import subprocess import logging as log import numpy", "', Colour.end, 'XML Config: Sequence Alignment flag is not set", "(genotype_flag == 'True' or genotype_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "in glob.glob(os.path.join(data_directory, '*')): if not (fqfile.endswith('.fq') or fqfile.endswith('.fastq') or fqfile.endswith('.fq.gz')", "for node in [fp_input, cag_region, intervening, ccg_region, cct_region, tp_input]: loci_root.append(node)", "integer is invalid.')) trigger=True mismatch_penalty = self.config_dict['alignment_flags']['@mismatch_penalty'] if not mismatch_penalty.isdigit():", "Make Stage outputs for use in everywhere else in pipeline", "could not be found.')) return False def check_input_files(input_format, input_file): if", "allele_object.get_ccg() cctlen = allele_object.get_cct() tp_flank = 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT' if direction ==", "True for fqfile in glob.glob(os.path.join(data_directory, '*')): if not (fqfile.endswith('.fq') or", "xml.etree import cElementTree from 
lxml import etree from reportlab.pdfgen import", "for datetime if not os.path.exists(output_root): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating", "## Subfunction for recycling code ## Calls UNIX type for", "atypical_flag = self.config_dict['instance_flags']['@atypical_realignment'] if not (atypical_flag == 'True' or atypical_flag", "= True if forward_reference.split('/')[-1] == reverse_reference.split('/')[-1]: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "= f.read() ## Loads data set from csv, into objects", "{t.tag: {k: v[0] if len(v) == 1 else v for", "so, quit \"\"\" trigger = False ## ## Jobname prefix", "Config: Specified chain_drop float is invalid.')) trigger=True seeded_chain_drop = self.config_dict['alignment_flags']['@seeded_chain_drop']", "Specified max_length is not a valid integer.')) trigger = True", "binaries to check for ## AttributeError in the situation where", "direction == 'fw': cagstart = '1'; cagend = '200' ccgstart", "file, and if valid, converts the parameters within the file", "## ## Loci Nodes fp_input = etree.Element('input', type='fiveprime', flank=fp_flank) cag_region", "seq_qc_path, align_path, predict_path] sequence_pairs.append(file_pair) return sequence_pairs def filesystem_exists_check(path, raise_exception=True): \"\"\"", "'Detected compressed input data. 
Extracting!')) break for extract_target in target_files:", "'<EMAIL>' ## ## Imports import string import os import errno", "is not Quality/Adapter/Both.')) trigger = True quality_threshold = self.config_dict['trim_flags']['@quality_threshold'] if", "\"\"\" if os.path.lexists(path): return True if raise_exception: log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified input", "node in [fp_input, cag_region, intervening, ccg_region, cct_region, tp_input]: loci_root.append(node) s", "is not a fa/fas file.')) trigger = True if forward_reference.split('/')[-1]", "True reverse_adapter = self.config_dict['demultiplex_flags']['@reverse_adapter'] for charbase in reverse_adapter: if charbase", "= self.database with open(data_file_name) as f: data_file = csv.reader(f) temp", "try: os.makedirs(path) except OSError as exc: if exc.errno == errno.EEXIST", "## Changed from WHICH as apparently type functions over different", "as attributes.\"\"\" def __init__(self, **kwargs): dict.__init__(self, kwargs) self.__dict__ = self", "## ## Determine buffer size to slice from above array", "file.')) trigger = True if forward_reference.split('/')[-1] == reverse_reference.split('/')[-1]: log.error('{}{}{}{}'.format(Colour.red, 'shd__", "## Loads description file for respective data set modeldescr_name =", "or forward_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified forward", "if string != '': return False if raise_exception: raise ValueError(\"Empty", "from sklearn import preprocessing from collections import defaultdict from xml.etree", "by parameter path, exists. 
Can be either a directory or", "collections import defaultdict from xml.etree import cElementTree from lxml import", "instance_params.config_dict['instance_flags']['@snp_calling'] except AttributeError: quality_control = instance_params['quality_control'] alignment = instance_params['sequence_alignment'] genotyping", "walltime = datetime.datetime.now().strftime('%H%M%S') today = date + '-' + walltime", "trigger = True error_rate = self.config_dict['demultiplex_flags']['@error_rate'] if not error_rate.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "ref_indexes, direction): ##TODO docstring label = input_xml.split('/')[-1].split('.')[0] target_output = os.path.join(index_path,", "NameError: trigger=True if alignment == 'True': try:type_func('seqtk') except NameError: trigger=True", "etree.Element('input', type='threeprime', flank=tp_flank) for node in [fp_input, cag_region, intervening, ccg_region,", "sequence.')) trigger = True reverse_adapter = self.config_dict['trim_flags']['@reverse_adapter'] for charbase in", "seq_match_score.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified seq_match_score integer", "reverse_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified reverse reference", "target_output, ref_indexes[0]], stdout=toutfi, stderr=subprocess.PIPE) cat_process.wait() toutfi.close() target_output = temp_output return", "if not quality_cutoff.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP", "validate_against_dtd(self): \"\"\" Validate input config against DTD ruleset i.e. 
confirms", "trigger = True sequence_qc_flag = self.config_dict['instance_flags']['@quality_control'] if not (sequence_qc_flag ==", "etree.Element('input', type='repeat_region', order='2', unit='CCG', start=ccgstart, end=ccgend) cct_region = etree.Element('input', type='repeat_region',", "type functions over different shells/config files def type_func(binary): binary_result =", "= glob.glob(os.path.join(data_path, '*')) sorted_input = sorted(input_files) sequence_pairs = [] file_count", "first line return align_lines[1:] ## ## No need to tidy", "not band_width.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified band_width", "is not set to True/False.')) trigger = True atypical_flag =", "Alignment flag settings if alignment_flag == 'True': min_seed_length = self.config_dict['alignment_flags']['@min_seed_length']", "Colour.end, 'I/O: Non-even number of input files specified. Cannot continue", "= os.path.join(instance_rundir, sample_root, 'Align') predict_path = os.path.join(instance_rundir, sample_root, 'Predict') file_pair[sample_root]", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified indel_penalty integer(s) is(are)", "def load_model(self): ## Loads description file for respective data set", "self.validate_against_dtd() self.set_dictionary() self.validate_config() def validate_against_dtd(self): \"\"\" Validate input config against", "reverse reference file is not a fa/fas file.')) trigger =", "ruleset self.validate_against_dtd() self.set_dictionary() self.validate_config() def validate_against_dtd(self): \"\"\" Validate input config", "quality_cutoff.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP Quality Cutoff", "dict.__init__(self, kwargs) self.__dict__ = self class DataLoader: def __init__(self, database,", "for respective data set modeldescr_name = self.descriptor with open(modeldescr_name) as", "- else False is returned. 
\"\"\" if os.path.lexists(path): return True", "range(0, len(trim_lines)): if '== Summary ==' in trim_lines[i]: summary_start =", "files from data path ## Sort so that ordering isn't", "log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Detected compressed input data. Extracting!')) break", "Colour.end, 'XML Config: Invalid character detected in FW adapter sequence.'))", "forward_adapter = self.config_dict['demultiplex_flags']['@forward_adapter'] for charbase in forward_adapter: if charbase not", "Colour.end, 'Output directories OK!')) return run_dir def replace_fqfile(mutate_list, target_fqfile, altered_path):", "'True': try:type_func('java') except NameError: trigger=True try:type_func('fastqc') except NameError: trigger=True try:type_func('cutadapt')", "dictionary is full of valid settings! \"\"\" trigger = False", "log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating instance run directory.. ')) mkdir_p(run_dir)", "against DTD ruleset i.e. confirms conformation of XML structure \"\"\"", "os.path.join(output_root, jobname) mkdir_p(run_dir) else: purge_choice = '' while True: purge_choice", "'shd__ ', Colour.end, 'XML Config: Invalid character detected in FW", "= True reverse_position = self.config_dict['demultiplex_flags']['@reverse_position'] if reverse_position not in ['5P',", "label = d[-1] labels.append(label) le = preprocessing.LabelEncoder() le.fit(labels) hash_int_labels =", "demultiplexing_flag = self.config_dict['instance_flags']['@demultiplex'] if not (demultiplexing_flag == 'True' or demultiplexing_flag", "self.config_dict['alignment_flags']['@indel_penalty'] indel_penalty = indel_penalty_raw.split(',') for individual_indelpen in indel_penalty: if not", "and RV references have identical filenames. 
Will create indexing issue.'))", "x < 1.0.')) trigger = True ## ## Alignment flag", "in range(0, len(input_list)): if target in input_list[i]: return i def", "if quality_control == 'True': try:type_func('java') except NameError: trigger=True try:type_func('fastqc') except", "'True' or atypical_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "Colour.end, 'XML Config: Given demultiplexing forward adapter position invalid! [5P,", "'shd__ ', Colour.end, 'XML Config: Given demultiplexing reverse adapter position", "number of input files specified. Cannot continue without pairing!')) sys.exit(2)", "True error_tolerance = self.config_dict['trim_flags']['@error_tolerance'] if not isinstance(float(error_tolerance), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "SNP Quality Cutoff value is not an integer.')) trigger =", "', Colour.end, 'XML Config: Specified band_width integer is invalid.')) trigger=True", "recursive_generation(element_tree) self.config_dict = self.config_dict[list(self.config_dict.keys())[0]] def validate_config(self): \"\"\" Method which validates", "python dictionary {key: value}. 
This dictionary will be used for", "self.config_dict = recursive_generation(element_tree) self.config_dict = self.config_dict[list(self.config_dict.keys())[0]] def validate_config(self): \"\"\" Method", "'XML Config: SNP Calling flag is not True/False.')) trigger =", "individual_gaextend.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified gap_extend_penalty integer(s)", "is not True/False.')) trigger = True genotype_flag = self.config_dict['instance_flags']['@genotype_prediction'] if", "reverse_data, instance_path, seq_qc_path, align_path, predict_path] sequence_pairs.append(file_pair) return sequence_pairs def filesystem_exists_check(path,", "minimum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified min_length is", "(forward_reference.endswith('.fa') or forward_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "invalid.')) trigger=True seed_length_extension = self.config_dict['alignment_flags']['@seed_length_extension'] if not isinstance(float(seed_length_extension), float): log.error('{}{}{}{}'.format(Colour.red,", "'.fa') temp_output = os.path.join(index_path, label + '_concat.fa') gen_process = subprocess.Popen(['generatr',", "raise_exception=True): \"\"\" Simple check to see if the string provided", "dictionary, Append keys if children: dd = defaultdict(list) for dc", "True ## ## Config mode check if parsed_arguments.config: if not", "True/False.')) trigger = True sequence_qc_flag = self.config_dict['instance_flags']['@quality_control'] if not (sequence_qc_flag", "def scrape_summary_data(stage, input_report_file): ## ## If the argument input_report_file is", "pretty_print=True) with open(atypical_path, 'w') as xmlfi: xmlfi.write(s.decode()) xmlfi.close() return atypical_path", "string is empty. 
If raise_exception is False and the string", "'XML Config: Demultiplexing flag is not set to True/False.')) trigger", "trimming_type == 'Adapter' or trimming_type == 'Both'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "isinstance(float(chain_drop), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified chain_drop", "file could not be found.')) trigger = True for xmlfile", "= next(data_file) feature_names = np.array(temp) labels = [] for i,", "datetime.date.today().strftime('%d-%m-%Y') walltime = datetime.datetime.now().strftime('%H%M%S') today = date + '-' +", "dtd_object.validate(self.config_file): dtd_file.close() log.error(\"DTD validation failure {0}: {1}\".format(self.config_filename, dtd_object.error_log.filter_from_errors()[0])) sys.exit(2) dtd_file.close()", "allele_object.get_cct() tp_flank = 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT' if direction == 'fw': cagstart =", "either a directory or file. If the path exists, True", "trigger=True try:type_func('generatr') except NameError: trigger=True if snp_calling == 'True': try:", "mutate_list: loc = mutate_list.index(target_fqfile) mutate_list[loc] = altered_path return mutate_list def", "Job folder. 
Cannot write output.') else: ## Ensures root output", "0 for i in range(0, len(trim_lines)): if '== Summary =='", "invalid.')) trigger=True indel_penalty_raw = self.config_dict['alignment_flags']['@indel_penalty'] indel_penalty = indel_penalty_raw.split(',') for individual_indelpen", "object, reader to be viewed through accessing the config_dict variable.", "True alignment_flag = self.config_dict['instance_flags']['@sequence_alignment'] if not (alignment_flag == 'True' or", "'XML Config: Specified prime_clipping_penalty integer(s) is(are) invalid.')) trigger=True unpaired_pairing_penalty =", "Recursion adapted from http://stackoverflow.com/a/9286702 \"\"\" def recursive_generation(t): d = {t.tag:", "trigger = True quality_cutoff = self.config_dict['prediction_flags']['@quality_cutoff'] if not quality_cutoff.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "for -c style, except AttributeError for -b style try: quality_control", "trigger = True ## ## Demultiplexing flag settings trim_adapter_base =", "{key: value}. 
This dictionary will be used for variables within", "valid float.')) trigger = True if not float(error_tolerance) in np.arange(0,1.1,0.01):", "is not an XML file.')) trigger = True return trigger", "self.config_dict['alignment_flags']['@skip_seed_with_occurrence'] if not skip_seed_with_occurrence.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "self.config_dict['demultiplex_flags']['@max_length'] if not maximum_length == '': if not maximum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red,", "isn't recycled for i in range(0, len(sorted_input), 2): file_pair =", "self.database = database self.descriptor = descriptor def load_model(self): ## Loads", "ccg_region, cct_region, tp_input]: loci_root.append(node) s = etree.tostring(data_root, pretty_print=True) with open(atypical_path,", "gen_process.wait() ## ## Join typical and atypical reference into one", "Colour.end, 'XML Config: SNP Calling flag is not True/False.')) trigger", "is returned. \"\"\" if os.path.lexists(path): return True if raise_exception: log.error('{}{}{}{}'.format(Colour.red,'shd__", "sorted(input_files) sequence_pairs = [] file_count = len(sorted_input) if not file_count", "try:type_func('seqtk') except NameError: trigger=True try:type_func('bwa') except NameError: trigger=True try:type_func('samtools') except", "folder already exists. Delete existing folder? 
Y/N: ')) if not", "'/': log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified Job Name has invalid characters: \"', character,", "(atypical_flag == 'True' or atypical_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "not minimum_overlap.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified min_overlap", "'gtype': pass def generate_atypical_xml(label, allele_object, index_path, direction): \"\"\" :param allele_object:", "def validate_against_dtd(self): \"\"\" Validate input config against DTD ruleset i.e.", "= open(self.dtd_filename, 'r') dtd_object = etree.DTD(dtd_file) ## ## If validation", "file's contents. If all pass, guarantees that the settings dictionary", "integer is invalid.')) trigger=True band_width = self.config_dict['alignment_flags']['@band_width'] if not band_width.isdigit():", "'w') cat_process = subprocess.Popen(['cat', target_output, ref_indexes[0]], stdout=toutfi, stderr=subprocess.PIPE) cat_process.wait() toutfi.close()", "'shd__ ', Colour.end, 'XML Config: Specified prime_clipping_penalty integer(s) is(are) invalid.'))", "dictionary-like object that exposes its keys as attributes.\"\"\" def __init__(self,", "for fqfile in glob.glob(os.path.join(data_directory, '*')): if not (fqfile.endswith('.fq') or fqfile.endswith('.fastq')", "predict_path] sequence_pairs.append(file_pair) return sequence_pairs def filesystem_exists_check(path, raise_exception=True): \"\"\" Checks to", "= True snpcall_flag = self.config_dict['instance_flags']['@snp_calling'] if not (snpcall_flag == 'True'", "'shd__ ', Colour.end, 'XML Config: SNP Calling flag is not", "in parsed_arguments.config: if not check_input_files('.xml',xmlfile): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified", "t.attrib.items()) if t.text: text = t.text.strip() if children or t.attrib:", "maximum_length == '': if not maximum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', 
Colour.end,", "trim_adapters): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified trimming adapter", "in enumerate(data_file): data[i] = d[:-1] label = d[-1] labels.append(label) le", "'t', '1') return boolean_value def empty_string_check(string, raise_exception=True): \"\"\" Simple check", "3: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:1] return ''.join(cleanse_target) else: cleanse_target =", "self.config_dict['demultiplex_flags']['@min_overlap'] if not minimum_overlap.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "data_directory = self.config_dict['@data_dir'] if not os.path.exists(data_directory): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "True for xmlfile in parsed_arguments.config: if not check_input_files('.xml',xmlfile): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "provided by parameter string is empty. False indicates the string", "= etree.Element('input', type='repeat_region', order='3', unit='CCT', start=str(cctlen), end=str(cctlen)) tp_input = etree.Element('input',", "forward_data_name.endswith('_R1'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Forward input file does not end in", "position invalid! [5P, 3P, AP]')) trigger = True reverse_adapter =", "in _R1. 
', forward_data)) sys.exit(2) ## ## Check reverse ends", "== 'True': trimming_type = self.config_dict['trim_flags']['@trim_type'] if not (trimming_type == 'Quality'", "sample_root, 'SeqQC') align_path = os.path.join(instance_rundir, sample_root, 'Align') predict_path = os.path.join(instance_rundir,", "'XML Config: Specified gap_extend_penalty integer(s) is(are) invalid.')) trigger=True prime_clipping_penalty_raw =", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified min_length is not", "input path could not be found.')) return False def check_input_files(input_format,", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified data directory could", "charbase in forward_adapter: if charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__", "return True def sequence_pairings(data_path, instance_rundir): ## ## Get input files", "os.path.join(instance_rundir, sample_root, 'SeqQC') align_path = os.path.join(instance_rundir, sample_root, 'Align') predict_path =", "Sequence Alignment flag is not set to True/False.')) trigger =", "in reverse_adapter demultiplexing flag.')) trigger = True reverse_position = self.config_dict['demultiplex_flags']['@reverse_position']", "= mutate_list.index(target_fqfile) mutate_list[loc] = altered_path return mutate_list def scrape_summary_data(stage, input_report_file):", "3P, AP]')) trigger = True reverse_adapter = self.config_dict['demultiplex_flags']['@reverse_adapter'] for charbase", "instance_params.config_dict['instance_flags']['@genotype_prediction'] snp_calling = instance_params.config_dict['instance_flags']['@snp_calling'] except AttributeError: quality_control = instance_params['quality_control'] alignment", "= os.path.join(output_root, jobname) mkdir_p(run_dir) else: purge_choice = '' while True:", "def mkdir_p(path): try: os.makedirs(path) except OSError as exc: if exc.errno", "for genotyping ## since we already have the data from", "'True' 
or sequence_qc_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "maximum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified max_length is", "root... ')) mkdir_p(output_root) run_dir = os.path.join(output_root, 'ScaleHDRun_'+today) log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ',", "vs dtd, parse info to dictionary, validate vs ruleset self.validate_against_dtd()", "min_seed_length integer is invalid.')) trigger=True band_width = self.config_dict['alignment_flags']['@band_width'] if not", "= allele_object.get_ccg() cctlen = allele_object.get_cct() tp_flank = 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT' if direction", "xmlfi: xmlfi.write(s.decode()) xmlfi.close() return atypical_path def generate_reference(input_xml, index_path, ref_indexes, direction):", "Given a string (boolean_value), returns a boolean value representing the", "__init__(self, **kwargs): dict.__init__(self, kwargs) self.__dict__ = self class DataLoader: def", "def sequence_pairings(data_path, instance_rundir): ## ## Get input files from data", "= cElementTree.XML(string_repr) self.config_dict = recursive_generation(element_tree) self.config_dict = self.config_dict[list(self.config_dict.keys())[0]] def validate_config(self):", "forward_data = sorted_input[i] reverse_data = sorted_input[i+1] ## ## Check forward", "bunch() data_file_name = self.database with open(data_file_name) as f: data_file =", "import shutil import sys import glob import datetime import subprocess", "filesystem_exists_check(path, raise_exception=True): \"\"\" Checks to see if the path, specified", "path exists, True is returned. 
If the path does not", "'trim': with open(input_report_file, 'r') as trpf: trim_lines = trpf.readlines() ##", "= subprocess.Popen(['generatr', '-i', input_xml, '-o', target_output], stdout=subprocess.PIPE, stderr=subprocess.PIPE) gen_process.wait() ##", "', Colour.end, 'Creating Output with prefix: ', jobname)) run_dir =", "Config: Atypical Realignment flag is not True/False.')) trigger = True", "parse_boolean(boolean_value): \"\"\" Given a string (boolean_value), returns a boolean value", "is int: cleanse_target = input_list[input_object].split(':')[1].lstrip().rstrip() return cleanse_target else: return '*'", "return '*' def mkdir_p(path): try: os.makedirs(path) except OSError as exc:", "Config: Specified data directory could not be found.')) trigger =", "Summary ==' in trim_lines[i]: summary_start = i ## ## Slice", "## Check for configuration file (just incase) if self.config_filename is", "= True trim_adapters = ['-a','-g','-a$','-g^','-b'] adapter_flag = self.config_dict['trim_flags']['@adapter_flag'] if not", "sample_root, 'Predict') file_pair[sample_root] = [forward_data, reverse_data, instance_path, seq_qc_path, align_path, predict_path]", "(sequence_qc_flag == 'True' or sequence_qc_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "is None: log.error(\"No configuration file specified!\") else: self.config_file = etree.parse(self.config_filename)", "and close summary_data = trim_lines[summary_start:summary_start + scraping_buffer] trpf.close() return summary_data[2:]", "open(modeldescr_name) as f: descr_text = f.read() ## Loads data set", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Non FastQ/GZ data detected", "', Colour.end, 'XML Config: Specified min_seed_length integer is invalid.')) trigger=True", "= altered_path return mutate_list def scrape_summary_data(stage, input_report_file): ## ## If", "', Colour.end, 'XML Config: Non FastQ/GZ data detected in specified", "if t.attrib else None} children = 
list(t) ## ## If", "a string with 'true', 't', 'y' or 'yes' will yield", "subprocess.Popen([binary_string], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) binary_result = binary_subprocess.communicate() binary_subprocess.wait() if 'not", "Colour.end, 'XML Config: Sequence Alignment flag is not set to", "if '== Summary ==' in trim_lines[i]: summary_start = i ##", "= self.config_dict['demultiplex_flags']['@reverse_adapter'] for charbase in reverse_adapter: if charbase not in", "= i ## ## Slice and close summary_data = trim_lines[summary_start:summary_start", "d = {t.tag: {k: v[0] if len(v) == 1 else", "trigger = True forward_reference = self.config_dict['@forward_reference'] if not os.path.isfile(forward_reference): log.error('{}{}{}{}'.format(Colour.red,", "'shd__ ', Colour.end, 'Clearing pre-existing Jobname Prefix: ', jobname)) run_dir", "Subfunction for recycling code ## Calls UNIX type for checking", "yellow = '\\033[93m' red = '\\033[91m' bold = '\\033[1m' underline", "file_pair[sample_root] = [forward_data, reverse_data, instance_path, seq_qc_path, align_path, predict_path] sequence_pairs.append(file_pair) return", "or alignment_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "cyan = '\\033[96m' darkcyan = '\\033[36m' blue = '\\033[94m' green", "R1 forward_data_name = sorted_input[i].split('/')[-1].split('.')[0] if not forward_data_name.endswith('_R1'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Forward", "Config: Specified seeded_chain_drop integer is invalid.')) trigger=True seq_match_score = self.config_dict['alignment_flags']['@seq_match_score']", "NameError: trigger=True try:type_func('cutadapt') except NameError: trigger=True if alignment == 'True':", "True sequence_qc_flag = self.config_dict['instance_flags']['@quality_control'] if not (sequence_qc_flag == 'True' or", "if input_file.endswith(input_format): return True return False def 
initialise_libraries(instance_params): trigger =", "Colour.end, 'XML Config: Specified gap_extend_penalty integer(s) is(are) invalid.')) trigger=True prime_clipping_penalty_raw", "if jobname: target_output = os.path.join(output_root, jobname) if not os.path.exists(target_output): log.info('{}{}{}{}{}'.format(Colour.bold,", "self.config_dict['trim_flags']['@reverse_adapter'] for charbase in reverse_adapter: if charbase not in trim_adapter_base:", "label=allele_object.get_reflabel()); data_root.append(loci_root) ## ## Loci Nodes fp_input = etree.Element('input', type='fiveprime',", "= '1'; ccgend = '20' if direction == 'rv': cagstart", "text else: d[t.tag] = text return d ## ## Takes", "forward ends with R1 forward_data_name = sorted_input[i].split('/')[-1].split('.')[0] if not forward_data_name.endswith('_R1'):", "be viewed through accessing the config_dict variable. \"\"\" def __init__(self,", "False if raise_exception: raise ValueError(\"Empty string detected!\") return True def", "(snpcall_flag == 'True' or snpcall_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "Colour.end, 'XML Config: Atypical Realignment flag is not True/False.')) trigger", "mkdir_p(path): try: os.makedirs(path) except OSError as exc: if exc.errno ==", "trigger = False ## ## Jobname prefix validity check if", "True. 
\"\"\" boolean_value = string.lower(boolean_value) in ('yes', 'y', 'true', 't',", "Specified error tolerance is not a valid float.')) trigger =", "', Colour.end, 'XML Config: Failure, exiting.')) sys.exit(2) else: log.info('{}{}{}{}'.format(Colour.green, 'shd__", "flag is not True/False.')) trigger = True snpcall_flag = self.config_dict['instance_flags']['@snp_calling']", "a valid integer.')) trigger = True ## ## Trimming flag", "= instance_params.config_dict['instance_flags']['@quality_control'] alignment = instance_params.config_dict['instance_flags']['@sequence_alignment'] genotyping = instance_params.config_dict['instance_flags']['@genotype_prediction'] snp_calling =", "Colour.end, 'XML Config: Non FastQ/GZ data detected in specified input", "valid integer.')) trigger = True maximum_length = self.config_dict['demultiplex_flags']['@max_length'] if not", "it ## Then make the run directory for datetime if", "skip_seed_with_occurrence.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified skip_seed_with_occurrence integer", "in indel_penalty: if not individual_indelpen.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "settings if genotype_flag == 'True': snp_observation_pcnt = self.config_dict['prediction_flags']['@snp_observation_threshold'] if not", "Specified prime_clipping_penalty integer(s) is(are) invalid.')) trigger=True unpaired_pairing_penalty = self.config_dict['alignment_flags']['@unpaired_pairing_penalty'] if", "else: ## Ensures root output is a real directory ##", "set from csv, into objects in preparation for bunch() data_file_name", "not be found.')) trigger = True if not (forward_reference.endswith('.fa') or", "self.config_file = etree.parse(self.config_filename) ## ## Check config vs dtd, parse", "Sequence Quality control flag is not set to True/False.')) trigger", "fa/fas file.')) trigger = True if forward_reference.split('/')[-1] == reverse_reference.split('/')[-1]: 
log.error('{}{}{}{}'.format(Colour.red,", "if not (reverse_reference.endswith('fa') or reverse_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "database self.descriptor = descriptor def load_model(self): ## Loads description file", "'1'; ccgend = '20' ## ## Create XML data_root =", "and extracts information from the tree into a python dictionary", "le = preprocessing.LabelEncoder() le.fit(labels) hash_int_labels = le.transform(labels) return DataClump(DATA=data, TARGET=hash_int_labels,", "np import csv from io import StringIO import PyPDF2 from", "Colour.end, 'XML Config: SNP Quality Cutoff value is not an", "else None} children = list(t) ## ## If list was", "== 'fw': toutfi = open(temp_output, 'w') cat_process = subprocess.Popen(['cat', target_output,", "not (alignment_flag == 'True' or alignment_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "specified root doesn't exist, make it ## Then make the", "if not os.path.exists(target_output): log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating Output with", "as exc: if exc.errno == errno.EEXIST and os.path.isdir(path): pass else:", "in range(0,39): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified quality", "set to True/False.')) trigger = True atypical_flag = self.config_dict['instance_flags']['@atypical_realignment'] if", "for individual_indelpen in indel_penalty: if not individual_indelpen.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified band_width integer is", "def check_input_files(input_format, input_file): if input_file.endswith(input_format): return True return False def", "populated, create dictionary, Append keys if children: dd = defaultdict(list)", "isinstance(float(error_tolerance), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified error", 
"trigger = True forward_adapter = self.config_dict['trim_flags']['@forward_adapter'] for charbase in forward_adapter:", "self.config_dict['alignment_flags']['@unpaired_pairing_penalty'] if not unpaired_pairing_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "snpcall_flag = self.config_dict['instance_flags']['@snp_calling'] if not (snpcall_flag == 'True' or snpcall_flag", "log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Forward input file does not end in _R1.", "stderr=subprocess.PIPE) binary_result = binary_subprocess.communicate() binary_subprocess.wait() if 'not found'.encode() in binary_result[0]", "in ('yes', 'y', 'true', 't', '1') return boolean_value def empty_string_check(string,", "file does not end in _R1. ', forward_data)) sys.exit(2) ##", "trigger = True if forward_reference.split('/')[-1] == reverse_reference.split('/')[-1]: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "t.attrib: d[t.tag].update(('@' + k, v) for k, v in t.attrib.items())", "invalid.')) trigger=True unpaired_pairing_penalty = self.config_dict['alignment_flags']['@unpaired_pairing_penalty'] if not unpaired_pairing_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "Config mode check if parsed_arguments.config: if not filesystem_exists_check(parsed_arguments.config[0]): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "= os.path.join(index_path, label + '_concat.fa') gen_process = subprocess.Popen(['generatr', '-i', input_xml,", "invalid.')) trigger = True elif not int(quality_threshold) in range(0,39): log.error('{}{}{}{}'.format(Colour.red,", "is empty, True is returned. 
\"\"\" if string != '':", "True if not float(error_tolerance) in np.arange(0,1.1,0.01): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "'-d', extract_target], stderr=subprocess.PIPE) unzipd.wait() return True def sequence_pairings(data_path, instance_rundir): ##", "not unpaired_pairing_penalty.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified unpaired_pairing_penalty", "cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:1] return ''.join(cleanse_target) else: cleanse_target = input_list[input_object].lstrip().rstrip().split('", "#/usr/bin/python __version__ = '1.0' __author__ = '<EMAIL>' ## ## Imports", "in specified input directory.')) trigger = True forward_reference = self.config_dict['@forward_reference']", "does not end in _R1. ', forward_data)) sys.exit(2) ## ##", "settings if sequence_qc_flag == 'True': trimming_type = self.config_dict['trim_flags']['@trim_type'] if not", "be raised if the string is empty. 
If raise_exception is", "if not isinstance(float(error_tolerance), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "not snp_observation_pcnt.isdigit(): if not int(snp_observation_pcnt) in range(1,5): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "Invalid character detected in reverse_adapter demultiplexing flag.')) trigger = True", "for k, v in dd.items()}} ## ## Values for key", "'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT' if direction == 'fw': cagstart = '1'; cagend =", "cElementTree.XML(string_repr) self.config_dict = recursive_generation(element_tree) self.config_dict = self.config_dict[list(self.config_dict.keys())[0]] def validate_config(self): \"\"\"", "', Colour.end, 'XML Config: FW and RV references have identical", "'XML Config: Specified band_width integer is invalid.')) trigger=True seed_length_extension =", "the situation where instance_params origin differs ## try for -c", "Given demultiplexing forward adapter position invalid! [5P, 3P, AP]')) trigger", "''.join(cleanse_target) else: cleanse_target = input_list[input_object].lstrip().rstrip().split(' ')[0:2] return ' '.join(cleanse_target) else:", "kwargs) self.__dict__ = self class DataLoader: def __init__(self, database, descriptor):", "Non-even number of input files specified. 
Cannot continue without pairing!'))", "t.text: text = t.text.strip() if children or t.attrib: if text:", "'shd__ ', Colour.end, 'XML Config: Specified quality threshold integer out", "object dtd_file = open(self.dtd_filename, 'r') dtd_object = etree.DTD(dtd_file) ## ##", "snp_calling = instance_params['snp_calling'] if quality_control == 'True': try:type_func('java') except NameError:", "Invalid character detected in forward_adapter demultiplexing flag.')) trigger = True", "= self.config_dict['@forward_reference'] if not os.path.isfile(forward_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "Colour.end, 'XML Config: Genotype Prediction control flag is not True/False.'))", "as apparently type functions over different shells/config files def type_func(binary):", "'w') as xmlfi: xmlfi.write(s.decode()) xmlfi.close() return atypical_path def generate_reference(input_xml, index_path,", "Colour.end, 'XML Config: Specified min_seed_length integer is invalid.')) trigger=True band_width", "if not band_width.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "', Colour.end, 'XML Config: Specified seed_length_extension float is invalid.')) trigger=True", "= sorted_input[i] reverse_data = sorted_input[i+1] ## ## Check forward ends", "' '.join(cleanse_target) else: return '*' def mkdir_p(path): try: os.makedirs(path) except", "[forward_data, reverse_data, instance_path, seq_qc_path, align_path, predict_path] sequence_pairs.append(file_pair) return sequence_pairs def", "mismatch_penalty integer is invalid.')) trigger=True indel_penalty_raw = self.config_dict['alignment_flags']['@indel_penalty'] indel_penalty =", "invalid or unsupported files present so, quit \"\"\" trigger =", "outputs for use in everywhere else in pipeline sample_root =", "if not (forward_reference.endswith('.fa') or forward_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML", "## ## 
Check forward ends with R1 forward_data_name = sorted_input[i].split('/')[-1].split('.')[0]", "## Values for key if t.attrib: d[t.tag].update(('@' + k, v)", "= True genotype_flag = self.config_dict['instance_flags']['@genotype_prediction'] if not (genotype_flag == 'True'", "else False is returned. \"\"\" if os.path.lexists(path): return True if", "invalid.')) trigger=True seeded_chain_drop = self.config_dict['alignment_flags']['@seeded_chain_drop'] if not seeded_chain_drop.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "= True sequence_qc_flag = self.config_dict['instance_flags']['@quality_control'] if not (sequence_qc_flag == 'True'", "def __init__(self): pass purple = '\\033[95m' cyan = '\\033[96m' darkcyan", "input files specified. Cannot continue without pairing!')) sys.exit(2) ## ##", "not (purge_choice.lower() == 'y') and not (purge_choice.lower() == 'n'): log.info('{}{}{}{}'.format(Colour.red,", "children): for k, v in dc.items(): dd[k].append(v) d = {t.tag:", "invalid.')) trigger=True chain_drop = self.config_dict['alignment_flags']['@chain_drop'] if not isinstance(float(chain_drop), float): log.error('{}{}{}{}'.format(Colour.red,", "\"\"\" ##TODO docstring atypical_path = os.path.join(index_path, '{}{}_{}.xml'.format(direction, label, allele_object.get_reflabel())) fp_flank", "== 'True' or alignment_flag == 'False'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "## Inform user it's all gonna be okaaaayyyy log.info('{}{}{}{}'.format(Colour.green, 'shd__", "sanitise_inputs(parsed_arguments): \"\"\" Utilises filesystem_exists_check and check_input_files if either return false,", "character, '\"')) trigger = True ## ## Config mode check", "1.0.')) trigger = True ## ## Alignment flag settings if", "if not maximum_length.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified", "output_argument): run_dir = '' output_root = output_argument[0] if jobname: target_output", "unit='CAG', 
start=cagstart, end=cagend) intervening = etree.Element('input', type='intervening', sequence=intv, prior='1') ccg_region", "file_pair = {} forward_data = sorted_input[i] reverse_data = sorted_input[i+1] ##", "charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config:", "not seq_match_score.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified seq_match_score", "(just incase) if self.config_filename is None: log.error(\"No configuration file specified!\")", "empty. False indicates the string is NOT empty. Parameter raise_exception", "== 'True': try:type_func('seqtk') except NameError: trigger=True try:type_func('bwa') except NameError: trigger=True", "## If the argument input_report_file is from alignment.. if stage", "selection.')) trigger = True forward_adapter = self.config_dict['trim_flags']['@forward_adapter'] for charbase in", "fqfile.endswith('.fq.gz') or fqfile.endswith('.fastq.gz')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Non", "'3P', 'AP']: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Given demultiplexing", "reverse_data_name.endswith('_R2'): log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Reverse input file does not end in", "'XML Config: Given demultiplexing reverse adapter position invalid! [5P, 3P,", "seed_length_extension float is invalid.')) trigger=True skip_seed_with_occurrence = self.config_dict['alignment_flags']['@skip_seed_with_occurrence'] if not", "is not True/False.')) trigger = True snpcall_flag = self.config_dict['instance_flags']['@snp_calling'] if", "True forward_position = self.config_dict['demultiplex_flags']['@forward_position'] if forward_position not in ['5P', '3P',", "Calls UNIX type for checking binaries present ## Changed from", "trigger = False ## ## Subfunction for recycling code ##", "path, exists. Can be either a directory or file. 
If", "adapter_flag = self.config_dict['trim_flags']['@adapter_flag'] if not (adapter_flag in trim_adapters): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "genotype_flag == 'True': snp_observation_pcnt = self.config_dict['prediction_flags']['@snp_observation_threshold'] if not snp_observation_pcnt.isdigit(): if", "= allele_object.get_intervening() ccgstart = ''; ccgend = '' ccglen =", "files def type_func(binary): binary_result = [] binary_string = 'type {}'.format(binary)", "input Y or N.')) continue else: break if purge_choice.lower() ==", "with open(atypical_path, 'w') as xmlfi: xmlfi.write(s.decode()) xmlfi.close() return atypical_path def", "= self.config_dict['trim_flags']['@trim_type'] if not (trimming_type == 'Quality' or trimming_type ==", "configuration file's contents. If all pass, guarantees that the settings", "red = '\\033[91m' bold = '\\033[1m' underline = '\\033[4m' end", "{k: v[0] if len(v) == 1 else v for k,", "trimming_type == 'Both'): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Trimming", "binary_result[1]: log.critical('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Missing binary: ', binary, '!')) raise NameError ##", "Calling flag is not True/False.')) trigger = True ## ##", "parameter path, exists. 
Can be either a directory or file.", "purge_choice = '' while True: purge_choice = input('{}{}{}{}'.format(Colour.bold, 'shd__ ',", "self.scriptdir = scriptdir self.config_filename = config_filename self.dtd_filename = scriptdir +", "[5P, 3P, AP]')) trigger = True error_rate = self.config_dict['demultiplex_flags']['@error_rate'] if", "gonna be okaaaayyyy log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'Output directories OK!'))", "Takes the formatted xml doc, puts through generator, returns dictionary", "Jobname Prefix: ', jobname)) run_dir = os.path.join(output_root, jobname) if os.path.exists(run_dir):", "= os.path.join(output_root, jobname) if not os.path.exists(target_output): log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end,", "Genotype prediction flag settings if genotype_flag == 'True': snp_observation_pcnt =", "def replace_fqfile(mutate_list, target_fqfile, altered_path): if target_fqfile in mutate_list: loc =", "if sequence_qc_flag == 'True': trimming_type = self.config_dict['trim_flags']['@trim_type'] if not (trimming_type", "required, only skip first line return align_lines[1:] ## ## No", "trigger=True band_width = self.config_dict['alignment_flags']['@band_width'] if not band_width.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "WHICH as apparently type functions over different shells/config files def", "chose not to delete pre-existing Job folder. 
Cannot write output.')", "Utilises filesystem_exists_check and check_input_files if either return false, path is", "trigger=True if genotyping == 'True': try:type_func('samtools') except NameError: trigger=True try:type_func('generatr')", "the data from our own objects if stage == 'gtype':", "cleanse_target else: return '*' def sanitise_alignment_output(input_object, input_list, stage): if type(input_object)", "numpy as np import csv from io import StringIO import", "file to a dictionary object, reader to be viewed through", "docstring atypical_path = os.path.join(index_path, '{}{}_{}.xml'.format(direction, label, allele_object.get_reflabel())) fp_flank = 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC'", "DataLoader: def __init__(self, database, descriptor): self.database = database self.descriptor =", "generator, returns dictionary string_repr = etree.tostring(self.config_file, pretty_print=True) element_tree = cElementTree.XML(string_repr)", "'shd__ ', Colour.end, 'XML Config: Specified error tolerance is not", "in trim_lines[1]: scraping_buffer += 1 ## ## Get Anchor summary_start", "== reverse_reference.split('/')[-1]: log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: FW and", "trigger = True reverse_reference = self.config_dict['@reverse_reference'] if not os.path.isfile(reverse_reference): log.error('{}{}{}{}'.format(Colour.red,", "reverse_adapter demultiplexing flag.')) trigger = True reverse_position = self.config_dict['demultiplex_flags']['@reverse_position'] if", "valid, converts the parameters within the file to a dictionary", "xmlfi.close() return atypical_path def generate_reference(input_xml, index_path, ref_indexes, direction): ##TODO docstring", "', Colour.end, 'XML Config: Specified seq_match_score integer is invalid.')) trigger=True", "boolean_value def empty_string_check(string, raise_exception=True): \"\"\" Simple check to see if", "'shd__ ', Colour.end, 'XML Config: Specified mismatch_penalty integer is invalid.'))", 
"Config: Demultiplexing flag is not set to True/False.')) trigger =", "## ## If validation fails, close the object (memory) and", "invalid.')) trigger=True skip_seed_with_occurrence = self.config_dict['alignment_flags']['@skip_seed_with_occurrence'] if not skip_seed_with_occurrence.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__", "an IOError is raised - else False is returned. \"\"\"", "= 8 if '-q' in trim_lines[1]: scraping_buffer += 1 ##", "'-i', input_xml, '-o', target_output], stdout=subprocess.PIPE, stderr=subprocess.PIPE) gen_process.wait() ## ## Join", "')) mkdir_p(run_dir) ## Inform user it's all gonna be okaaaayyyy", "f: data_file = csv.reader(f) temp = next(data_file) n_samples = int(temp[0])", "for charbase in forward_adapter: if charbase not in trim_adapter_base: log.error('{}{}{}{}'.format(Colour.red,", "else: purge_choice = '' while True: purge_choice = input('{}{}{}{}'.format(Colour.bold, 'shd__", "reverse_adapter = self.config_dict['demultiplex_flags']['@reverse_adapter'] for charbase in reverse_adapter: if charbase not", "it's all gonna be okaaaayyyy log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'Output", "'*')) for extract_target in target_files: if extract_target.lower().endswith(('.fq.gz', '.fastq.gz')): log.info('{}{}{}{}'.format(Colour.bold, 'shd__", "if children: dd = defaultdict(list) for dc in map(recursive_generation, children):", "is not a fa/fas file.')) trigger = True reverse_reference =", "return False if raise_exception: raise ValueError(\"Empty string detected!\") return True", "= True if not (forward_reference.endswith('.fa') or forward_reference.endswith('.fasta')): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "'shd__ ', Colour.end, 'XML Config: Atypical Realignment flag is not", "log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file could not be", "from above array scraping_buffer = 8 if '-q' in trim_lines[1]:", "StringIO import PyPDF2 from sklearn import 
preprocessing from collections import", "int(snp_observation_pcnt) in range(1,5): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: SNP", "Colour.end, 'XML Config: Specified min_overlap is not a valid integer.'))", "range (0-38).')) trigger = True trim_adapters = ['-a','-g','-a$','-g^','-b'] adapter_flag =", "= open(temp_output, 'w') cat_process = subprocess.Popen(['cat', target_output, ref_indexes[0]], stdout=toutfi, stderr=subprocess.PIPE)", "Extracting!')) break for extract_target in target_files: unzipd = subprocess.Popen(['gzip', '-q',", "target_files: if extract_target.lower().endswith(('.fq.gz', '.fastq.gz')): log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Detected compressed", "## No ranges required, only skip first line return align_lines[1:]", "forward_reference = self.config_dict['@forward_reference'] if not os.path.isfile(forward_reference): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end,", "threshold integer out of range (0-38).')) trigger = True trim_adapters", "''; cagend = '' intv = allele_object.get_intervening() ccgstart = '';", "end=str(cctlen)) tp_input = etree.Element('input', type='threeprime', flank=tp_flank) for node in [fp_input,", "if raise_exception: log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified input path could not be found.'))", "'type {}'.format(binary) binary_subprocess = subprocess.Popen([binary_string], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) binary_result =", "data_file = csv.reader(f) temp = next(data_file) n_samples = int(temp[0]) n_features", "scriptdir self.config_filename = config_filename self.dtd_filename = scriptdir + \"/config/config.dtd\" ##", "= trpf.readlines() ## ## Determine buffer size to slice from", "'fw': toutfi = open(temp_output, 'w') cat_process = subprocess.Popen(['cat', target_output, ref_indexes[0]],", "error_rate.isdigit(): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'XML Config: Specified 
error_rate is", "trpf.readlines() ## ## Determine buffer size to slice from above", "import numpy as np import csv from io import StringIO", "ConfigReader(object): \"\"\" The configuration file reader. Opens a configuration file,", "## ## No need to tidy up report for genotyping", "import PyPDF2 from sklearn import preprocessing from collections import defaultdict", "descriptor def load_model(self): ## Loads description file for respective data", "'*')): if not (fqfile.endswith('.fq') or fqfile.endswith('.fastq') or fqfile.endswith('.fq.gz') or fqfile.endswith('.fastq.gz')):", "file does not end in _R2. ', reverse_data)) sys.exit(2) ##", "Config: Specified error tolerance is not 0.0 < x <", "error_tolerance = self.config_dict['trim_flags']['@error_tolerance'] if not isinstance(float(error_tolerance), float): log.error('{}{}{}{}'.format(Colour.red, 'shd__ ',", "not a valid integer.')) trigger = True maximum_length = self.config_dict['demultiplex_flags']['@max_length']" ]
[ "64) assert isinstance(encoder.enc2, SimpleGatedConvModule) assert encoder.enc2.conv.stride == (2, 2) assert", "(2, 2) assert encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv') x", "64) assert offset.shape == (2, 32, 32, 32, 32) assert", "64 assert encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention') x =", "32, 32, 32) if torch.cuda.is_available(): neck.cuda() res, offset = neck(x.cuda(),", "encoder.enc4.out_channels == 128 encoder = DeepFillEncoder( conv_type='gated_conv', channel_factor=0.75).cuda() x =", "outputs = encoder(x) assert isinstance(outputs, dict) assert 'out' in outputs", "== 128 encoder = DeepFillEncoder( conv_type='gated_conv', channel_factor=0.75).cuda() x = torch.randn((2,", "torch from mmedit.models.backbones import ContextualAttentionNeck, DeepFillEncoder from mmedit.models.common import SimpleGatedConvModule", "64 encoder = DeepFillEncoder(encoder_type='stage2_conv') x = torch.randn((2, 5, 256, 256))", "assert encoder.enc2.conv.out_channels == 48 * 2 def test_deepfill_contextual_attention_neck(): # TODO:", "== (2, 2) assert encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv')", "encoder = DeepFillEncoder(encoder_type='stage2_conv') x = torch.randn((2, 5, 256, 256)) outputs", "outputs['out'] assert res.shape == (2, 128, 64, 64) assert encoder.enc2.stride", "DeepFillEncoder().cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs = encoder(x)", "== 64 assert encoder.enc4.out_channels == 128 encoder = DeepFillEncoder( conv_type='gated_conv',", "offset = neck(x.cuda(), mask.cuda()) assert res.shape == (2, 128, 64,", "== (2, 128, 64, 64) assert encoder.enc2.stride == (2, 2)", "DeepFillEncoder(encoder_type='stage2_conv').cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs = encoder(x)", "64 assert encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention').cuda() x =", "32, 32) neck = 
ContextualAttentionNeck( in_channels=128, conv_type='gated_conv').cuda() res, offset =", "encoder = DeepFillEncoder(encoder_type='stage2_attention') x = torch.randn((2, 5, 256, 256)) outputs", "mmedit.models.common import SimpleGatedConvModule def test_deepfill_enc(): encoder = DeepFillEncoder() x =", "= outputs['out'] assert res.shape == (2, 96, 64, 64) assert", "== 64 encoder = DeepFillEncoder(encoder_type='stage2_conv') x = torch.randn((2, 5, 256,", "if torch.cuda.is_available(): neck.cuda() res, offset = neck(x.cuda(), mask.cuda()) assert res.shape", "TODO: add unittest for contextual attention module neck = ContextualAttentionNeck(in_channels=128)", "conv_type='gated_conv', channel_factor=0.75).cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs =", "x = torch.randn((2, 5, 256, 256)).cuda() outputs = encoder(x) assert", "32 assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 64 encoder", "assert offset.shape == (2, 32, 32, 32, 32) if torch.cuda.is_available():", "neck = ContextualAttentionNeck(in_channels=128) x = torch.rand((2, 128, 64, 64)) mask", "encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention')", "test_deepfill_contextual_attention_neck(): # TODO: add unittest for contextual attention module neck", "== 64 assert encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention') x", "mask = torch.zeros((2, 1, 64, 64)) mask[..., 20:100, 23:90] =", "= DeepFillEncoder(encoder_type='stage2_attention').cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs =", "in outputs res = outputs['out'] assert res.shape == (2, 128,", "'out' in outputs res = outputs['out'] assert res.shape == (2,", "64) assert offset.shape == (2, 32, 32, 32, 32) neck", "def test_deepfill_contextual_attention_neck(): # TODO: add unittest for contextual attention module", "Copyright (c) OpenMMLab. All rights reserved. 
import torch from mmedit.models.backbones", "= torch.rand((2, 128, 64, 64)) mask = torch.zeros((2, 1, 64,", "64)) mask[..., 20:100, 23:90] = 1. res, offset = neck(x,", "neck = ContextualAttentionNeck( in_channels=128, conv_type='gated_conv').cuda() res, offset = neck(x.cuda(), mask.cuda())", "(2, 96, 64, 64) assert isinstance(encoder.enc2, SimpleGatedConvModule) assert encoder.enc2.conv.stride ==", "128, 64, 64)) mask = torch.zeros((2, 1, 64, 64)) mask[...,", "in_channels=128, conv_type='gated_conv').cuda() res, offset = neck(x.cuda(), mask.cuda()) assert res.shape ==", "== 48 * 2 def test_deepfill_contextual_attention_neck(): # TODO: add unittest", "20:100, 23:90] = 1. res, offset = neck(x, mask) assert", "= torch.zeros((2, 1, 64, 64)) mask[..., 20:100, 23:90] = 1.", "(2, 2) assert encoder.enc2.conv.out_channels == 48 * 2 def test_deepfill_contextual_attention_neck():", "== (2, 32, 32, 32, 32) if torch.cuda.is_available(): neck.cuda() res,", "64) assert encoder.enc2.stride == (2, 2) assert encoder.enc2.out_channels == 64", "32) if torch.cuda.is_available(): neck.cuda() res, offset = neck(x.cuda(), mask.cuda()) assert", "res, offset = neck(x.cuda(), mask.cuda()) assert res.shape == (2, 128,", "64 assert encoder.enc4.out_channels == 128 if torch.cuda.is_available(): encoder = DeepFillEncoder().cuda()", "channel_factor=0.75).cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs = encoder(x)", "assert encoder.enc4.out_channels == 128 if torch.cuda.is_available(): encoder = DeepFillEncoder().cuda() x", "DeepFillEncoder( conv_type='gated_conv', channel_factor=0.75).cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs", "128 if torch.cuda.is_available(): encoder = DeepFillEncoder().cuda() x = torch.randn((2, 5,", "import SimpleGatedConvModule def test_deepfill_enc(): encoder = DeepFillEncoder() x = torch.randn((2,", "256)) outputs = encoder(x) assert isinstance(outputs, dict) assert 'out' in", "= neck(x, mask) assert res.shape == (2, 128, 64, 64)", "neck(x.cuda(), 
mask.cuda()) assert res.shape == (2, 128, 64, 64) assert", "res = outputs['out'] assert res.shape == (2, 128, 64, 64)", "== (2, 96, 64, 64) assert isinstance(encoder.enc2, SimpleGatedConvModule) assert encoder.enc2.conv.stride", "assert encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention') x = torch.randn((2,", "64, 64) assert isinstance(encoder.enc2, SimpleGatedConvModule) assert encoder.enc2.conv.stride == (2, 2)", "assert encoder.enc2.conv.stride == (2, 2) assert encoder.enc2.conv.out_channels == 48 *", "DeepFillEncoder() x = torch.randn((2, 5, 256, 256)) outputs = encoder(x)", "64, 64) assert encoder.enc2.out_channels == 32 assert encoder.enc3.out_channels == 64", "assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 64 encoder =", "assert offset.shape == (2, 32, 32, 32, 32) neck =", "96, 64, 64) assert isinstance(encoder.enc2, SimpleGatedConvModule) assert encoder.enc2.conv.stride == (2,", "torch.cuda.is_available(): neck.cuda() res, offset = neck(x.cuda(), mask.cuda()) assert res.shape ==", "(2, 128, 64, 64) assert encoder.enc2.out_channels == 32 assert encoder.enc3.out_channels", "for contextual attention module neck = ContextualAttentionNeck(in_channels=128) x = torch.rand((2,", "rights reserved. 
import torch from mmedit.models.backbones import ContextualAttentionNeck, DeepFillEncoder from", "= DeepFillEncoder() x = torch.randn((2, 5, 256, 256)) outputs =", "x = torch.randn((2, 5, 256, 256)) outputs = encoder(x) assert", "256, 256)) outputs = encoder(x) assert isinstance(outputs, dict) assert 'out'", "res = outputs['out'] assert res.shape == (2, 96, 64, 64)", "= torch.randn((2, 5, 256, 256)).cuda() outputs = encoder(x) assert isinstance(outputs,", "48 * 2 def test_deepfill_contextual_attention_neck(): # TODO: add unittest for", "encoder(x) assert isinstance(outputs, dict) assert 'out' in outputs res =", "mask) assert res.shape == (2, 128, 64, 64) assert offset.shape", "ContextualAttentionNeck( in_channels=128, conv_type='gated_conv').cuda() res, offset = neck(x.cuda(), mask.cuda()) assert res.shape", "assert isinstance(encoder.enc2, SimpleGatedConvModule) assert encoder.enc2.conv.stride == (2, 2) assert encoder.enc2.conv.out_channels", "32, 32, 32) neck = ContextualAttentionNeck( in_channels=128, conv_type='gated_conv').cuda() res, offset", "All rights reserved. 
import torch from mmedit.models.backbones import ContextualAttentionNeck, DeepFillEncoder", "outputs['out'] assert res.shape == (2, 128, 64, 64) assert encoder.enc2.out_channels", "2) assert encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv').cuda() x =", "= DeepFillEncoder().cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs =", "mask.cuda()) assert res.shape == (2, 128, 64, 64) assert offset.shape", "(2, 128, 64, 64) assert offset.shape == (2, 32, 32,", "128 encoder = DeepFillEncoder( conv_type='gated_conv', channel_factor=0.75).cuda() x = torch.randn((2, 5,", "128, 64, 64) assert encoder.enc2.out_channels == 32 assert encoder.enc3.out_channels ==", "64)) mask = torch.zeros((2, 1, 64, 64)) mask[..., 20:100, 23:90]", "encoder.enc2.conv.out_channels == 48 * 2 def test_deepfill_contextual_attention_neck(): # TODO: add", "64) assert offset.shape == (2, 32, 32, 32, 32) if", "encoder.enc2.stride == (2, 2) assert encoder.enc2.out_channels == 64 encoder =", "assert res.shape == (2, 128, 64, 64) assert encoder.enc2.stride ==", "= DeepFillEncoder(encoder_type='stage2_conv').cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs =", "# Copyright (c) OpenMMLab. All rights reserved. import torch from", "= DeepFillEncoder(encoder_type='stage2_attention') x = torch.randn((2, 5, 256, 256)) outputs =", "== 32 assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 128", "torch.zeros((2, 1, 64, 64)) mask[..., 20:100, 23:90] = 1. 
res,", "if torch.cuda.is_available(): encoder = DeepFillEncoder().cuda() x = torch.randn((2, 5, 256,", "== 64 assert encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention').cuda() x", "def test_deepfill_enc(): encoder = DeepFillEncoder() x = torch.randn((2, 5, 256,", "64 assert encoder.enc4.out_channels == 128 encoder = DeepFillEncoder( conv_type='gated_conv', channel_factor=0.75).cuda()", "assert encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv').cuda() x = torch.randn((2,", "encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention').cuda()", "128, 64, 64) assert encoder.enc2.stride == (2, 2) assert encoder.enc2.out_channels", "32 assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 128 encoder", "outputs res = outputs['out'] assert res.shape == (2, 96, 64,", "256)).cuda() outputs = encoder(x) assert isinstance(outputs, dict) assert 'out' in", "neck(x, mask) assert res.shape == (2, 128, 64, 64) assert", "encoder = DeepFillEncoder() x = torch.randn((2, 5, 256, 256)) outputs", "isinstance(outputs, dict) assert 'out' in outputs res = outputs['out'] assert", "== 64 assert encoder.enc4.out_channels == 128 if torch.cuda.is_available(): encoder =", "1. 
res, offset = neck(x, mask) assert res.shape == (2,", "res.shape == (2, 96, 64, 64) assert isinstance(encoder.enc2, SimpleGatedConvModule) assert", "64, 64) assert encoder.enc2.stride == (2, 2) assert encoder.enc2.out_channels ==", "== 32 assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 64", "torch.cuda.is_available(): encoder = DeepFillEncoder().cuda() x = torch.randn((2, 5, 256, 256)).cuda()", "DeepFillEncoder(encoder_type='stage2_conv') x = torch.randn((2, 5, 256, 256)) outputs = encoder(x)", "== (2, 32, 32, 32, 32) neck = ContextualAttentionNeck( in_channels=128,", "encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv') x = torch.randn((2, 5,", "OpenMMLab. All rights reserved. import torch from mmedit.models.backbones import ContextualAttentionNeck,", "64 encoder = DeepFillEncoder(encoder_type='stage2_conv').cuda() x = torch.randn((2, 5, 256, 256)).cuda()", "== 64 encoder = DeepFillEncoder(encoder_type='stage2_attention').cuda() x = torch.randn((2, 5, 256,", "= neck(x.cuda(), mask.cuda()) assert res.shape == (2, 128, 64, 64)", "outputs['out'] assert res.shape == (2, 96, 64, 64) assert isinstance(encoder.enc2,", "mask[..., 20:100, 23:90] = 1. res, offset = neck(x, mask)", "64, 64) assert offset.shape == (2, 32, 32, 32, 32)", "(2, 128, 64, 64) assert encoder.enc2.stride == (2, 2) assert", "= DeepFillEncoder(encoder_type='stage2_conv') x = torch.randn((2, 5, 256, 256)) outputs =", "encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention') x = torch.randn((2, 5,", "2) assert encoder.enc2.conv.out_channels == 48 * 2 def test_deepfill_contextual_attention_neck(): #", "module neck = ContextualAttentionNeck(in_channels=128) x = torch.rand((2, 128, 64, 64))", "1, 64, 64)) mask[..., 20:100, 23:90] = 1. 
res, offset", "32) neck = ContextualAttentionNeck( in_channels=128, conv_type='gated_conv').cuda() res, offset = neck(x.cuda(),", "DeepFillEncoder(encoder_type='stage2_attention').cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs = encoder(x)", "encoder = DeepFillEncoder( conv_type='gated_conv', channel_factor=0.75).cuda() x = torch.randn((2, 5, 256,", "assert offset.shape == (2, 32, 32, 32, 32) assert isinstance(neck.conv1,", "test_deepfill_enc(): encoder = DeepFillEncoder() x = torch.randn((2, 5, 256, 256))", "== (2, 128, 64, 64) assert encoder.enc2.out_channels == 32 assert", "256, 256)).cuda() outputs = encoder(x) assert isinstance(outputs, dict) assert 'out'", "(2, 32, 32, 32, 32) if torch.cuda.is_available(): neck.cuda() res, offset", "5, 256, 256)) outputs = encoder(x) assert isinstance(outputs, dict) assert", "torch.rand((2, 128, 64, 64)) mask = torch.zeros((2, 1, 64, 64))", "ContextualAttentionNeck, DeepFillEncoder from mmedit.models.common import SimpleGatedConvModule def test_deepfill_enc(): encoder =", "encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv').cuda() x = torch.randn((2, 5,", "assert res.shape == (2, 128, 64, 64) assert offset.shape ==", "import ContextualAttentionNeck, DeepFillEncoder from mmedit.models.common import SimpleGatedConvModule def test_deepfill_enc(): encoder", "SimpleGatedConvModule def test_deepfill_enc(): encoder = DeepFillEncoder() x = torch.randn((2, 5,", "64, 64)) mask = torch.zeros((2, 1, 64, 64)) mask[..., 20:100,", "encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention').cuda() x = torch.randn((2, 5,", "64, 64)) mask[..., 20:100, 23:90] = 1. 
res, offset =", "32 assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 128 if", "64 encoder = DeepFillEncoder(encoder_type='stage2_attention').cuda() x = torch.randn((2, 5, 256, 256)).cuda()", "contextual attention module neck = ContextualAttentionNeck(in_channels=128) x = torch.rand((2, 128,", "ContextualAttentionNeck(in_channels=128) x = torch.rand((2, 128, 64, 64)) mask = torch.zeros((2,", "encoder = DeepFillEncoder(encoder_type='stage2_attention').cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs", "import torch from mmedit.models.backbones import ContextualAttentionNeck, DeepFillEncoder from mmedit.models.common import", "= outputs['out'] assert res.shape == (2, 128, 64, 64) assert", "= torch.randn((2, 5, 256, 256)) outputs = encoder(x) assert isinstance(outputs,", "(2, 2) assert encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv').cuda() x", "encoder.enc2.conv.stride == (2, 2) assert encoder.enc2.conv.out_channels == 48 * 2", "attention module neck = ContextualAttentionNeck(in_channels=128) x = torch.rand((2, 128, 64,", "# TODO: add unittest for contextual attention module neck =", "assert encoder.enc2.out_channels == 32 assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels", "== (2, 128, 64, 64) assert offset.shape == (2, 32,", "= ContextualAttentionNeck(in_channels=128) x = torch.rand((2, 128, 64, 64)) mask =", "mmedit.models.backbones import ContextualAttentionNeck, DeepFillEncoder from mmedit.models.common import SimpleGatedConvModule def test_deepfill_enc():", "torch.randn((2, 5, 256, 256)) outputs = encoder(x) assert isinstance(outputs, dict)", "from mmedit.models.common import SimpleGatedConvModule def test_deepfill_enc(): encoder = DeepFillEncoder() x", "assert isinstance(outputs, dict) assert 'out' in outputs res = outputs['out']", "from mmedit.models.backbones import ContextualAttentionNeck, DeepFillEncoder from mmedit.models.common import SimpleGatedConvModule def", 
"64 encoder = DeepFillEncoder(encoder_type='stage2_attention') x = torch.randn((2, 5, 256, 256))", "unittest for contextual attention module neck = ContextualAttentionNeck(in_channels=128) x =", "= 1. res, offset = neck(x, mask) assert res.shape ==", "assert encoder.enc2.stride == (2, 2) assert encoder.enc2.out_channels == 64 encoder", "<gh_stars>1000+ # Copyright (c) OpenMMLab. All rights reserved. import torch", "neck.cuda() res, offset = neck(x.cuda(), mask.cuda()) assert res.shape == (2,", "32, 32, 32, 32) neck = ContextualAttentionNeck( in_channels=128, conv_type='gated_conv').cuda() res,", "encoder.enc4.out_channels == 128 if torch.cuda.is_available(): encoder = DeepFillEncoder().cuda() x =", "(2, 32, 32, 32, 32) neck = ContextualAttentionNeck( in_channels=128, conv_type='gated_conv').cuda()", "assert encoder.enc4.out_channels == 128 encoder = DeepFillEncoder( conv_type='gated_conv', channel_factor=0.75).cuda() x", "== (2, 2) assert encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv').cuda()", "= DeepFillEncoder( conv_type='gated_conv', channel_factor=0.75).cuda() x = torch.randn((2, 5, 256, 256)).cuda()", "offset.shape == (2, 32, 32, 32, 32) if torch.cuda.is_available(): neck.cuda()", "* 2 def test_deepfill_contextual_attention_neck(): # TODO: add unittest for contextual", "offset.shape == (2, 32, 32, 32, 32) neck = ContextualAttentionNeck(", "SimpleGatedConvModule) assert encoder.enc2.conv.stride == (2, 2) assert encoder.enc2.conv.out_channels == 48", "32, 32, 32, 32) if torch.cuda.is_available(): neck.cuda() res, offset =", "torch.randn((2, 5, 256, 256)).cuda() outputs = encoder(x) assert isinstance(outputs, dict)", "== 64 encoder = DeepFillEncoder(encoder_type='stage2_conv').cuda() x = torch.randn((2, 5, 256,", "(c) OpenMMLab. All rights reserved. 
import torch from mmedit.models.backbones import", "2) assert encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv') x =", "assert res.shape == (2, 96, 64, 64) assert isinstance(encoder.enc2, SimpleGatedConvModule)", "== 128 if torch.cuda.is_available(): encoder = DeepFillEncoder().cuda() x = torch.randn((2,", "outputs res = outputs['out'] assert res.shape == (2, 128, 64,", "res.shape == (2, 128, 64, 64) assert offset.shape == (2,", "isinstance(encoder.enc2, SimpleGatedConvModule) assert encoder.enc2.conv.stride == (2, 2) assert encoder.enc2.conv.out_channels ==", "encoder = DeepFillEncoder().cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs", "dict) assert 'out' in outputs res = outputs['out'] assert res.shape", "offset = neck(x, mask) assert res.shape == (2, 128, 64,", "5, 256, 256)).cuda() outputs = encoder(x) assert isinstance(outputs, dict) assert", "encoder.enc2.out_channels == 32 assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels ==", "== (2, 2) assert encoder.enc2.conv.out_channels == 48 * 2 def", "DeepFillEncoder(encoder_type='stage2_attention') x = torch.randn((2, 5, 256, 256)) outputs = encoder(x)", "encoder = DeepFillEncoder(encoder_type='stage2_conv').cuda() x = torch.randn((2, 5, 256, 256)).cuda() outputs", "== 64 encoder = DeepFillEncoder(encoder_type='stage2_attention') x = torch.randn((2, 5, 256,", "assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 128 encoder =", "add unittest for contextual attention module neck = ContextualAttentionNeck(in_channels=128) x", "= ContextualAttentionNeck( in_channels=128, conv_type='gated_conv').cuda() res, offset = neck(x.cuda(), mask.cuda()) assert", "= encoder(x) assert isinstance(outputs, dict) assert 'out' in outputs res", "23:90] = 1. 
res, offset = neck(x, mask) assert res.shape", "res, offset = neck(x, mask) assert res.shape == (2, 128,", "encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 128 if torch.cuda.is_available(): encoder", "assert encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 128 if torch.cuda.is_available():", "res.shape == (2, 128, 64, 64) assert encoder.enc2.stride == (2,", "reserved. import torch from mmedit.models.backbones import ContextualAttentionNeck, DeepFillEncoder from mmedit.models.common", "DeepFillEncoder from mmedit.models.common import SimpleGatedConvModule def test_deepfill_enc(): encoder = DeepFillEncoder()", "32, 32) if torch.cuda.is_available(): neck.cuda() res, offset = neck(x.cuda(), mask.cuda())", "x = torch.rand((2, 128, 64, 64)) mask = torch.zeros((2, 1,", "in outputs res = outputs['out'] assert res.shape == (2, 96,", "assert encoder.enc4.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_attention').cuda() x = torch.randn((2,", "2 def test_deepfill_contextual_attention_neck(): # TODO: add unittest for contextual attention", "assert res.shape == (2, 128, 64, 64) assert encoder.enc2.out_channels ==", "res.shape == (2, 128, 64, 64) assert encoder.enc2.out_channels == 32", "64) assert encoder.enc2.out_channels == 32 assert encoder.enc3.out_channels == 64 assert", "128, 64, 64) assert offset.shape == (2, 32, 32, 32,", "conv_type='gated_conv').cuda() res, offset = neck(x.cuda(), mask.cuda()) assert res.shape == (2,", "offset.shape == (2, 32, 32, 32, 32) assert isinstance(neck.conv1, SimpleGatedConvModule)", "encoder.enc3.out_channels == 64 assert encoder.enc4.out_channels == 128 encoder = DeepFillEncoder(", "assert 'out' in outputs res = outputs['out'] assert res.shape ==", "assert encoder.enc2.out_channels == 64 encoder = DeepFillEncoder(encoder_type='stage2_conv') x = torch.randn((2," ]
[ "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('mvp',", "class Migration(migrations.Migration): dependencies = [ ('mvp', '0003_hublocation'), ] operations =", "dependencies = [ ('mvp', '0003_hublocation'), ] operations = [ migrations.RemoveField(", "[ migrations.RemoveField( model_name='hublocation', name='longitude', ), migrations.AddField( model_name='hublocation', name='longi', field=models.TextField(default=654433, max_length=90,", "Generated by Django 2.2.13 on 2020-11-27 05:49 from django.db import", "# Generated by Django 2.2.13 on 2020-11-27 05:49 from django.db", "on 2020-11-27 05:49 from django.db import migrations, models class Migration(migrations.Migration):", "05:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "= [ ('mvp', '0003_hublocation'), ] operations = [ migrations.RemoveField( model_name='hublocation',", "models class Migration(migrations.Migration): dependencies = [ ('mvp', '0003_hublocation'), ] operations", "Django 2.2.13 on 2020-11-27 05:49 from django.db import migrations, models", "by Django 2.2.13 on 2020-11-27 05:49 from django.db import migrations,", "'0003_hublocation'), ] operations = [ migrations.RemoveField( model_name='hublocation', name='longitude', ), migrations.AddField(", "), migrations.AddField( model_name='hublocation', name='longi', field=models.TextField(default=654433, max_length=90, unique=True, verbose_name='Longitude'), preserve_default=False, ),", "migrations, models class Migration(migrations.Migration): dependencies = [ ('mvp', '0003_hublocation'), ]", "migrations.RemoveField( model_name='hublocation', name='longitude', ), migrations.AddField( model_name='hublocation', name='longi', field=models.TextField(default=654433, max_length=90, unique=True,", "[ ('mvp', '0003_hublocation'), ] operations = [ migrations.RemoveField( model_name='hublocation', name='longitude',", "model_name='hublocation', name='longitude', ), 
migrations.AddField( model_name='hublocation', name='longi', field=models.TextField(default=654433, max_length=90, unique=True, verbose_name='Longitude'),", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "] operations = [ migrations.RemoveField( model_name='hublocation', name='longitude', ), migrations.AddField( model_name='hublocation',", "operations = [ migrations.RemoveField( model_name='hublocation', name='longitude', ), migrations.AddField( model_name='hublocation', name='longi',", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('mvp', '0003_hublocation'),", "('mvp', '0003_hublocation'), ] operations = [ migrations.RemoveField( model_name='hublocation', name='longitude', ),", "= [ migrations.RemoveField( model_name='hublocation', name='longitude', ), migrations.AddField( model_name='hublocation', name='longi', field=models.TextField(default=654433,", "Migration(migrations.Migration): dependencies = [ ('mvp', '0003_hublocation'), ] operations = [", "migrations.AddField( model_name='hublocation', name='longi', field=models.TextField(default=654433, max_length=90, unique=True, verbose_name='Longitude'), preserve_default=False, ), ]", "2.2.13 on 2020-11-27 05:49 from django.db import migrations, models class", "2020-11-27 05:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "name='longitude', ), migrations.AddField( model_name='hublocation', name='longi', field=models.TextField(default=654433, max_length=90, unique=True, verbose_name='Longitude'), preserve_default=False," ]
[ "uuid class Migration(migrations.Migration): dependencies = [ ('adminapp', '0011_faq'), ] operations", "migrations, models import uuid class Migration(migrations.Migration): dependencies = [ ('adminapp',", "[ ('adminapp', '0011_faq'), ] operations = [ migrations.AddField( model_name='faq', name='uuid',", "model_name='faq', name='answer', field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField( model_name='faq', name='question', field=models.TextField(blank=True,", "] operations = [ migrations.AddField( model_name='faq', name='uuid', field=models.UUIDField(blank=True, default=uuid.uuid4, null=True),", "field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField( model_name='faq', name='question', field=models.TextField(blank=True, default=None, null=True),", "2021-07-14 11:55 from django.db import migrations, models import uuid class", "('adminapp', '0011_faq'), ] operations = [ migrations.AddField( model_name='faq', name='uuid', field=models.UUIDField(blank=True,", "import uuid class Migration(migrations.Migration): dependencies = [ ('adminapp', '0011_faq'), ]", "Generated by Django 3.2.4 on 2021-07-14 11:55 from django.db import", "by Django 3.2.4 on 2021-07-14 11:55 from django.db import migrations,", "), migrations.AlterField( model_name='faq', name='answer', field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField( model_name='faq',", "migrations.AddField( model_name='faq', name='uuid', field=models.UUIDField(blank=True, default=uuid.uuid4, null=True), ), migrations.AlterField( model_name='faq', name='answer',", "3.2.4 on 2021-07-14 11:55 from django.db import migrations, models import", "'0011_faq'), ] operations = [ migrations.AddField( model_name='faq', name='uuid', field=models.UUIDField(blank=True, default=uuid.uuid4,", "[ migrations.AddField( model_name='faq', name='uuid', field=models.UUIDField(blank=True, default=uuid.uuid4, null=True), ), migrations.AlterField( 
model_name='faq',", "<filename>adminapp/migrations/0012_auto_20210714_1155.py # Generated by Django 3.2.4 on 2021-07-14 11:55 from", "operations = [ migrations.AddField( model_name='faq', name='uuid', field=models.UUIDField(blank=True, default=uuid.uuid4, null=True), ),", "field=models.UUIDField(blank=True, default=uuid.uuid4, null=True), ), migrations.AlterField( model_name='faq', name='answer', field=models.TextField(blank=True, default=None, null=True),", "Migration(migrations.Migration): dependencies = [ ('adminapp', '0011_faq'), ] operations = [", "django.db import migrations, models import uuid class Migration(migrations.Migration): dependencies =", "model_name='faq', name='uuid', field=models.UUIDField(blank=True, default=uuid.uuid4, null=True), ), migrations.AlterField( model_name='faq', name='answer', field=models.TextField(blank=True,", "Django 3.2.4 on 2021-07-14 11:55 from django.db import migrations, models", "on 2021-07-14 11:55 from django.db import migrations, models import uuid", "default=None, null=True), ), migrations.AlterField( model_name='faq', name='question', field=models.TextField(blank=True, default=None, null=True), ),", "import migrations, models import uuid class Migration(migrations.Migration): dependencies = [", "from django.db import migrations, models import uuid class Migration(migrations.Migration): dependencies", "models import uuid class Migration(migrations.Migration): dependencies = [ ('adminapp', '0011_faq'),", "null=True), ), migrations.AlterField( model_name='faq', name='question', field=models.TextField(blank=True, default=None, null=True), ), ]", "name='answer', field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField( model_name='faq', name='question', field=models.TextField(blank=True, default=None,", "= [ ('adminapp', '0011_faq'), ] operations = [ migrations.AddField( model_name='faq',", "name='uuid', field=models.UUIDField(blank=True, default=uuid.uuid4, null=True), ), 
migrations.AlterField( model_name='faq', name='answer', field=models.TextField(blank=True, default=None,", "migrations.AlterField( model_name='faq', name='answer', field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField( model_name='faq', name='question',", "11:55 from django.db import migrations, models import uuid class Migration(migrations.Migration):", "dependencies = [ ('adminapp', '0011_faq'), ] operations = [ migrations.AddField(", "null=True), ), migrations.AlterField( model_name='faq', name='answer', field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField(", "class Migration(migrations.Migration): dependencies = [ ('adminapp', '0011_faq'), ] operations =", "default=uuid.uuid4, null=True), ), migrations.AlterField( model_name='faq', name='answer', field=models.TextField(blank=True, default=None, null=True), ),", "# Generated by Django 3.2.4 on 2021-07-14 11:55 from django.db", "= [ migrations.AddField( model_name='faq', name='uuid', field=models.UUIDField(blank=True, default=uuid.uuid4, null=True), ), migrations.AlterField(" ]
[ "nib import csv from operator import itemgetter # PATH TO", "number corresponding to image case_number = img_filename.split('_')[1] # Write to", "mode='w') as csv_file: csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow(['Case", "csv file csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow([case_number, dice,", "with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as file: data = json.load(file) with open('json_parsed.csv', mode='w')", "<reponame>andrewsimonds14/Capstone import json import os import nibabel as nib import", "Get tumor dimensions # tumor_size = # Get case number", "[1,2,3] # Indexes at which the voxel size [x,y,z] is", "+ '_0000.nii.gz') label_ni = nib.load(raw_data_path + '/labelsTr/' + img_filename +", "# Get dice score on image dice = img['1']['Dice'] #", "csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow([case_number, dice, voxel_size[0], voxel_size[1],", "from operator import itemgetter # PATH TO PREPROCESSED DATA raw_data_path", "Indexes at which the voxel size [x,y,z] is stored #", "to image case_number = img_filename.split('_')[1] # Write to csv file", "as csv_file: csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow(['Case Number',", "json import os import nibabel as nib import csv from", "= nib.load(raw_data_path + '/labelsTr/' + img_filename + '.nii.gz') voxel_size =", "= '/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets' pixdim_ind = [1,2,3] # Indexes at which the", "# PATH TO JSON FILE with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as file: data", "dimensions # 
tumor_size = # Get case number corresponding to", "at which the voxel size [x,y,z] is stored # PATH", "Size-X', 'Voxel Size-Y', 'Voxel Size-Z']) for img in data['results']['all']: #", "to csv file csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow([case_number,", "Get dice score on image dice = img['1']['Dice'] # Get", "case number corresponding to image case_number = img_filename.split('_')[1] # Write", "img_filename.split('_')[1] # Write to csv file csv_writer = csv.writer(csv_file, delimiter=',',", "file csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow([case_number, dice, voxel_size[0],", "TO JSON FILE with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as file: data = json.load(file)", "itemgetter # PATH TO PREPROCESSED DATA raw_data_path = '/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets' pixdim_ind", "= img_filename.split('_')[1] # Write to csv file csv_writer = csv.writer(csv_file,", "csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow(['Case Number', 'Dice Score',", "+ img_filename + '_0000.nii.gz') label_ni = nib.load(raw_data_path + '/labelsTr/' +", "raw_data_path = '/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets' pixdim_ind = [1,2,3] # Indexes at which", "# PATH TO PREPROCESSED DATA raw_data_path = '/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets' pixdim_ind =", "# Write to csv file csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"',", "quoting=csv.QUOTE_MINIMAL) csv_writer.writerow(['Case Number', 'Dice Score', 'Voxel Size-X', 'Voxel Size-Y', 'Voxel", "import os import nibabel as nib import csv from operator", 
"'/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets' pixdim_ind = [1,2,3] # Indexes at which the voxel", "PATH TO PREPROCESSED DATA raw_data_path = '/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets' pixdim_ind = [1,2,3]", "+ '/imagesTr/' + img_filename + '_0000.nii.gz') label_ni = nib.load(raw_data_path +", "import itemgetter # PATH TO PREPROCESSED DATA raw_data_path = '/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets'", "'Voxel Size-X', 'Voxel Size-Y', 'Voxel Size-Z']) for img in data['results']['all']:", "image img_filename = (os.path.basename(img['reference']).split('.'))[0] img_ni = nib.load(raw_data_path + '/imagesTr/' +", "size [x,y,z] is stored # PATH TO JSON FILE with", "Get nifti data on image img_filename = (os.path.basename(img['reference']).split('.'))[0] img_ni =", "is stored # PATH TO JSON FILE with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as", "nibabel as nib import csv from operator import itemgetter #", "= (os.path.basename(img['reference']).split('.'))[0] img_ni = nib.load(raw_data_path + '/imagesTr/' + img_filename +", "'/imagesTr/' + img_filename + '_0000.nii.gz') label_ni = nib.load(raw_data_path + '/labelsTr/'", "'_0000.nii.gz') label_ni = nib.load(raw_data_path + '/labelsTr/' + img_filename + '.nii.gz')", "import json import os import nibabel as nib import csv", "nib.load(raw_data_path + '/imagesTr/' + img_filename + '_0000.nii.gz') label_ni = nib.load(raw_data_path", "itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"]) # Get tumor dimensions # tumor_size = # Get", "import nibabel as nib import csv from operator import itemgetter", "os import nibabel as nib import csv from operator import", "score on image dice = img['1']['Dice'] # Get nifti data", "operator import itemgetter # PATH TO PREPROCESSED DATA raw_data_path =", "dice score on image dice = 
img['1']['Dice'] # Get nifti", "img_ni = nib.load(raw_data_path + '/imagesTr/' + img_filename + '_0000.nii.gz') label_ni", "img_filename = (os.path.basename(img['reference']).split('.'))[0] img_ni = nib.load(raw_data_path + '/imagesTr/' + img_filename", "corresponding to image case_number = img_filename.split('_')[1] # Write to csv", "which the voxel size [x,y,z] is stored # PATH TO", "'.nii.gz') voxel_size = itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"]) # Get tumor dimensions # tumor_size", "file: data = json.load(file) with open('json_parsed.csv', mode='w') as csv_file: csv_writer", "data on image img_filename = (os.path.basename(img['reference']).split('.'))[0] img_ni = nib.load(raw_data_path +", "'Voxel Size-Z']) for img in data['results']['all']: # Get dice score", "data = json.load(file) with open('json_parsed.csv', mode='w') as csv_file: csv_writer =", "+ img_filename + '.nii.gz') voxel_size = itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"]) # Get tumor", "tumor dimensions # tumor_size = # Get case number corresponding", "PREPROCESSED DATA raw_data_path = '/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets' pixdim_ind = [1,2,3] # Indexes", "Write to csv file csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)", "import csv from operator import itemgetter # PATH TO PREPROCESSED", "'Voxel Size-Y', 'Voxel Size-Z']) for img in data['results']['all']: # Get", "'/labelsTr/' + img_filename + '.nii.gz') voxel_size = itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"]) # Get", "label_ni = nib.load(raw_data_path + '/labelsTr/' + img_filename + '.nii.gz') voxel_size", "nib.load(raw_data_path + '/labelsTr/' + img_filename + '.nii.gz') voxel_size = itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"])", "csv_file: csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow(['Case Number', 'Dice", "= json.load(file) with open('json_parsed.csv', mode='w') 
as csv_file: csv_writer = csv.writer(csv_file,", "data['results']['all']: # Get dice score on image dice = img['1']['Dice']", "+ '/labelsTr/' + img_filename + '.nii.gz') voxel_size = itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"]) #", "img in data['results']['all']: # Get dice score on image dice", "# Get nifti data on image img_filename = (os.path.basename(img['reference']).split('.'))[0] img_ni", "json.load(file) with open('json_parsed.csv', mode='w') as csv_file: csv_writer = csv.writer(csv_file, delimiter=',',", "csv from operator import itemgetter # PATH TO PREPROCESSED DATA", "= csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow(['Case Number', 'Dice Score', 'Voxel", "DATA raw_data_path = '/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets' pixdim_ind = [1,2,3] # Indexes at", "Get case number corresponding to image case_number = img_filename.split('_')[1] #", "csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow(['Case Number', 'Dice Score', 'Voxel Size-X',", "Size-Y', 'Voxel Size-Z']) for img in data['results']['all']: # Get dice", "tumor_size = # Get case number corresponding to image case_number", "= nib.load(raw_data_path + '/imagesTr/' + img_filename + '_0000.nii.gz') label_ni =", "= # Get case number corresponding to image case_number =", "# Indexes at which the voxel size [x,y,z] is stored", "delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow(['Case Number', 'Dice Score', 'Voxel Size-X', 'Voxel", "FILE with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as file: data = json.load(file) with open('json_parsed.csv',", "# Get case number corresponding to image case_number = img_filename.split('_')[1]", "'Dice Score', 'Voxel Size-X', 'Voxel Size-Y', 'Voxel Size-Z']) for img", 
"open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as file: data = json.load(file) with open('json_parsed.csv', mode='w') as", "in data['results']['all']: # Get dice score on image dice =", "on image dice = img['1']['Dice'] # Get nifti data on", "img['1']['Dice'] # Get nifti data on image img_filename = (os.path.basename(img['reference']).split('.'))[0]", "+ '.nii.gz') voxel_size = itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"]) # Get tumor dimensions #", "= [1,2,3] # Indexes at which the voxel size [x,y,z]", "stored # PATH TO JSON FILE with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as file:", "[x,y,z] is stored # PATH TO JSON FILE with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json')", "as file: data = json.load(file) with open('json_parsed.csv', mode='w') as csv_file:", "csv_writer.writerow(['Case Number', 'Dice Score', 'Voxel Size-X', 'Voxel Size-Y', 'Voxel Size-Z'])", "with open('json_parsed.csv', mode='w') as csv_file: csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"',", "open('json_parsed.csv', mode='w') as csv_file: csv_writer = csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)", "on image img_filename = (os.path.basename(img['reference']).split('.'))[0] img_ni = nib.load(raw_data_path + '/imagesTr/'", "voxel size [x,y,z] is stored # PATH TO JSON FILE", "voxel_size = itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"]) # Get tumor dimensions # tumor_size =", "JSON FILE with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as file: data = json.load(file) with", "quotechar='\"', quoting=csv.QUOTE_MINIMAL) 
csv_writer.writerow(['Case Number', 'Dice Score', 'Voxel Size-X', 'Voxel Size-Y',", "# Get tumor dimensions # tumor_size = # Get case", "img_filename + '.nii.gz') voxel_size = itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"]) # Get tumor dimensions", "img_filename + '_0000.nii.gz') label_ni = nib.load(raw_data_path + '/labelsTr/' + img_filename", "(os.path.basename(img['reference']).split('.'))[0] img_ni = nib.load(raw_data_path + '/imagesTr/' + img_filename + '_0000.nii.gz')", "as nib import csv from operator import itemgetter # PATH", "= csv.writer(csv_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) csv_writer.writerow([case_number, dice, voxel_size[0], voxel_size[1], voxel_size[2]])", "image dice = img['1']['Dice'] # Get nifti data on image", "for img in data['results']['all']: # Get dice score on image", "= itemgetter(*pixdim_ind)(img_ni.header[\"pixdim\"]) # Get tumor dimensions # tumor_size = #", "Score', 'Voxel Size-X', 'Voxel Size-Y', 'Voxel Size-Z']) for img in", "case_number = img_filename.split('_')[1] # Write to csv file csv_writer =", "nifti data on image img_filename = (os.path.basename(img['reference']).split('.'))[0] img_ni = nib.load(raw_data_path", "the voxel size [x,y,z] is stored # PATH TO JSON", "dice = img['1']['Dice'] # Get nifti data on image img_filename", "pixdim_ind = [1,2,3] # Indexes at which the voxel size", "= img['1']['Dice'] # Get nifti data on image img_filename =", "image case_number = img_filename.split('_')[1] # Write to csv file csv_writer", "# tumor_size = # Get case number corresponding to image", "Size-Z']) for img in data['results']['all']: # Get dice score on", "Number', 'Dice Score', 'Voxel Size-X', 'Voxel Size-Y', 'Voxel Size-Z']) for", "PATH TO JSON FILE with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as file: data =", "TO PREPROCESSED DATA raw_data_path = 
'/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets' pixdim_ind = [1,2,3] #" ]
[ "@gen.sync_engine def test(self, value): for n in range(2): self.v +=", "callback): callback(test) def queue_async(test, callback): global _queue _queue.append((callback, test)) def", "deque() def run_sync(test, callback): callback(test) def queue_async(test, callback): global _queue", "callback = _queue.popleft() callback[0](callback[1]) def run_async(): global _queue while True:", "DummyListOutOfOrder(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify value eq_(dummy.v, ['3',", ":license: Apache, see LICENSE for more details. \"\"\" from collections", "[] self.queue_type = queue_type @gen.sync_engine def test(self, value): self.v.append((yield gen.Task(self.queue_type,", "step_async() except IndexError: break def run_async_oor(): global _queue while True:", "see LICENSE for more details. \"\"\" from collections import deque", "details. \"\"\" from collections import deque from nose.tools import eq_", "LICENSE for more details. \"\"\" from collections import deque from", "global _queue _queue = deque() def run_sync(test, callback): callback(test) def", "def run_sync(test, callback): callback(test) def queue_async(test, callback): global _queue _queue.append((callback,", "def test_sync_queue(): init_environment() dummy = DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async()", "Verify value eq_(dummy.v, ['1', '2', '3']) def test_sync_queue_oor(): init_environment() dummy", "queue_type @gen.sync_engine def test(self, value): for n in range(2): self.v", "Dummy(queue_async) dummy.test('test') run_async() # Verify value eq_(dummy.v, 'test') def test_sync_queue():", "# Verify value eq_(dummy.v, ['1', '2', '3']) def test_sync_queue_oor(): init_environment()", "try: step_async() except IndexError: break def run_async_oor(): global _queue while", "value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyLoop(): def __init__(self, queue_type): self.v", "def test_sync_queue_oor(): 
init_environment() dummy = DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor()", "def test_async_queue_oor(): init_environment() dummy = DummyListOutOfOrder(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor()", "__init__(self, queue_type): self.v = None self.queue_type = queue_type @gen.sync_engine def", "tornadio2.tests.gen ~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011 by the <NAME>, see AUTHORS", "see AUTHORS for more details. :license: Apache, see LICENSE for", "class DummyList(): def __init__(self, queue_type): self.v = [] self.queue_type =", "more details. :license: Apache, see LICENSE for more details. \"\"\"", "-*- coding: utf-8 -*- \"\"\" tornadio2.tests.gen ~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011", "while True: try: step_async() except IndexError: break def run_async_oor(): global", "IndexError: break class Dummy(): def __init__(self, queue_type): self.v = None", "IndexError: break def run_async_oor(): global _queue while True: try: callback", "'3']) def test_async_queue_oor(): init_environment() dummy = DummyListOutOfOrder(queue_async) dummy.test('1') dummy.test('2') dummy.test('3')", "__init__(self, queue_type): self.v = 0 self.queue_type = queue_type @gen.sync_engine def", "queue_type): self.v = 0 self.queue_type = queue_type @gen.sync_engine def test(self,", "eq_(dummy.v, 'test') def test_async(): init_environment() dummy = Dummy(queue_async) dummy.test('test') run_async()", "run_async() # Verify value eq_(dummy.v, ['1', '2', '3']) def test_sync_queue_oor():", "Verify value eq_(dummy.v, ['1', '2', '3']) def test_async_queue_oor(): init_environment() dummy", "Dummy(run_sync) dummy.test('test') eq_(dummy.v, 'test') def test_async(): init_environment() dummy = Dummy(queue_async)", "-*- \"\"\" tornadio2.tests.gen ~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011 by the <NAME>,", "import deque from nose.tools import eq_ from tornadio2 import gen", "AUTHORS for more details. 
:license: Apache, see LICENSE for more", "_queue _queue.append((callback, test)) def step_async(): callback = _queue.popleft() callback[0](callback[1]) def", "callback = _queue.pop() callback[0](callback[1]) except IndexError: break class Dummy(): def", "dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify value eq_(dummy.v, ['1', '2',", "_queue while True: try: step_async() except IndexError: break def run_async_oor():", "self.v = [] self.queue_type = queue_type @gen.engine def test(self, value):", "queue_type @gen.sync_engine def test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyListOutOfOrder():", "value): self.v = yield gen.Task(self.queue_type, value) class DummyList(): def __init__(self,", "__init__(self, queue_type): self.v = [] self.queue_type = queue_type @gen.engine def", "n in range(2): self.v += (yield gen.Task(self.queue_type, value)) def test():", "_queue.pop() callback[0](callback[1]) except IndexError: break class Dummy(): def __init__(self, queue_type):", "dummy.test('test') eq_(dummy.v, 'test') def test_async(): init_environment() dummy = Dummy(queue_async) dummy.test('test')", "= DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify value eq_(dummy.v,", "class Dummy(): def __init__(self, queue_type): self.v = None self.queue_type =", "from tornadio2 import gen _queue = None def init_environment(): global", "step_async(): callback = _queue.popleft() callback[0](callback[1]) def run_async(): global _queue while", "dummy = DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify value", "test_sync_queue(): init_environment() dummy = DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async() #", "dummy.test('2') dummy.test('3') run_async_oor() # Verify value eq_(dummy.v, ['1', '2', '3'])", "import gen _queue = None def init_environment(): global _queue _queue", "self.queue_type = queue_type 
@gen.sync_engine def test(self, value): self.v = yield", "= DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async() # Verify value eq_(dummy.v,", "def test(self, value): self.v = yield gen.Task(self.queue_type, value) class DummyList():", "value eq_(dummy.v, ['1', '2', '3']) def test_async_queue_oor(): init_environment() dummy =", "range(2): self.v += (yield gen.Task(self.queue_type, value)) def test(): init_environment() dummy", "def step_async(): callback = _queue.popleft() callback[0](callback[1]) def run_async(): global _queue", "dummy.test('2') dummy.test('3') run_async_oor() # Verify value eq_(dummy.v, ['3', '2', '1'])", "'test') def test_async(): init_environment() dummy = Dummy(queue_async) dummy.test('test') run_async() #", "callback(test) def queue_async(test, callback): global _queue _queue.append((callback, test)) def step_async():", "callback[0](callback[1]) except IndexError: break class Dummy(): def __init__(self, queue_type): self.v", "self.v = [] self.queue_type = queue_type @gen.sync_engine def test(self, value):", "def queue_async(test, callback): global _queue _queue.append((callback, test)) def step_async(): callback", "queue_type): self.v = [] self.queue_type = queue_type @gen.engine def test(self,", "coding: utf-8 -*- \"\"\" tornadio2.tests.gen ~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011 by", "yield gen.Task(self.queue_type, value) class DummyList(): def __init__(self, queue_type): self.v =", "dummy.test('1') dummy.test('2') dummy.test('3') run_async() # Verify value eq_(dummy.v, ['1', '2',", "test(self, value): for n in range(2): self.v += (yield gen.Task(self.queue_type,", "value)) def test(): init_environment() dummy = Dummy(run_sync) dummy.test('test') eq_(dummy.v, 'test')", "= queue_type @gen.engine def test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class", "test_sync_queue_oor(): init_environment() dummy = DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') 
run_async_oor() #", "dummy.test('3') run_async_oor() # Verify value eq_(dummy.v, ['1', '2', '3']) def", "self.queue_type = queue_type @gen.sync_engine def test(self, value): for n in", "= None def init_environment(): global _queue _queue = deque() def", "for more details. :license: Apache, see LICENSE for more details.", "init_environment() dummy = Dummy(queue_async) dummy.test('test') run_async() # Verify value eq_(dummy.v,", "DummyListOutOfOrder(): def __init__(self, queue_type): self.v = [] self.queue_type = queue_type", "value))) class DummyLoop(): def __init__(self, queue_type): self.v = 0 self.queue_type", "'2', '3']) def test_sync_queue_oor(): init_environment() dummy = DummyList(queue_async) dummy.test('1') dummy.test('2')", "= _queue.popleft() callback[0](callback[1]) def run_async(): global _queue while True: try:", "= [] self.queue_type = queue_type @gen.sync_engine def test(self, value): self.v.append((yield", "dummy = DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async() # Verify value", "for n in range(2): self.v += (yield gen.Task(self.queue_type, value)) def", "value eq_(dummy.v, ['1', '2', '3']) def test_sync_queue_oor(): init_environment() dummy =", "_queue = None def init_environment(): global _queue _queue = deque()", "self.v = 0 self.queue_type = queue_type @gen.sync_engine def test(self, value):", "= Dummy(queue_async) dummy.test('test') run_async() # Verify value eq_(dummy.v, 'test') def", "self.v.append((yield gen.Task(self.queue_type, value))) class DummyLoop(): def __init__(self, queue_type): self.v =", "test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyListOutOfOrder(): def __init__(self, queue_type):", "callback): global _queue _queue.append((callback, test)) def step_async(): callback = _queue.popleft()", "(c) 2011 by the <NAME>, see AUTHORS for more details.", "_queue.popleft() callback[0](callback[1]) def run_async(): global _queue while True: try: step_async()", 
"run_async() # Verify value eq_(dummy.v, 'test') def test_sync_queue(): init_environment() dummy", "eq_(dummy.v, ['1', '2', '3']) def test_sync_queue_oor(): init_environment() dummy = DummyList(queue_async)", "callback[0](callback[1]) def run_async(): global _queue while True: try: step_async() except", "value): for n in range(2): self.v += (yield gen.Task(self.queue_type, value))", "2011 by the <NAME>, see AUTHORS for more details. :license:", "self.v.append((yield gen.Task(self.queue_type, value))) class DummyListOutOfOrder(): def __init__(self, queue_type): self.v =", "0 self.queue_type = queue_type @gen.sync_engine def test(self, value): for n", "dummy.test('test') run_async() # Verify value eq_(dummy.v, 'test') def test_sync_queue(): init_environment()", "in range(2): self.v += (yield gen.Task(self.queue_type, value)) def test(): init_environment()", "test_async(): init_environment() dummy = Dummy(queue_async) dummy.test('test') run_async() # Verify value", "nose.tools import eq_ from tornadio2 import gen _queue = None", "DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify value eq_(dummy.v, ['1',", "\"\"\" from collections import deque from nose.tools import eq_ from", "['1', '2', '3']) def test_sync_queue_oor(): init_environment() dummy = DummyList(queue_async) dummy.test('1')", "more details. \"\"\" from collections import deque from nose.tools import", "the <NAME>, see AUTHORS for more details. :license: Apache, see", "<NAME>, see AUTHORS for more details. 
:license: Apache, see LICENSE", "run_async(): global _queue while True: try: step_async() except IndexError: break", "'3']) def test_sync_queue_oor(): init_environment() dummy = DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3')", "run_sync(test, callback): callback(test) def queue_async(test, callback): global _queue _queue.append((callback, test))", "collections import deque from nose.tools import eq_ from tornadio2 import", "gen.Task(self.queue_type, value)) def test(): init_environment() dummy = Dummy(run_sync) dummy.test('test') eq_(dummy.v,", "value eq_(dummy.v, 'test') def test_sync_queue(): init_environment() dummy = DummyList(queue_async) dummy.test('1')", "eq_(dummy.v, ['1', '2', '3']) def test_async_queue_oor(): init_environment() dummy = DummyListOutOfOrder(queue_async)", "dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify value eq_(dummy.v, ['3', '2',", "def test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyListOutOfOrder(): def __init__(self,", "def test(): init_environment() dummy = Dummy(run_sync) dummy.test('test') eq_(dummy.v, 'test') def", "test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyLoop(): def __init__(self, queue_type):", "_queue.append((callback, test)) def step_async(): callback = _queue.popleft() callback[0](callback[1]) def run_async():", "for more details. 
\"\"\" from collections import deque from nose.tools", "True: try: step_async() except IndexError: break def run_async_oor(): global _queue", "= yield gen.Task(self.queue_type, value) class DummyList(): def __init__(self, queue_type): self.v", "eq_ from tornadio2 import gen _queue = None def init_environment():", "run_async_oor(): global _queue while True: try: callback = _queue.pop() callback[0](callback[1])", "[] self.queue_type = queue_type @gen.engine def test(self, value): self.v.append((yield gen.Task(self.queue_type,", "except IndexError: break class Dummy(): def __init__(self, queue_type): self.v =", "while True: try: callback = _queue.pop() callback[0](callback[1]) except IndexError: break", "break class Dummy(): def __init__(self, queue_type): self.v = None self.queue_type", "= Dummy(run_sync) dummy.test('test') eq_(dummy.v, 'test') def test_async(): init_environment() dummy =", "run_async_oor() # Verify value eq_(dummy.v, ['1', '2', '3']) def test_async_queue_oor():", "# Verify value eq_(dummy.v, ['1', '2', '3']) def test_async_queue_oor(): init_environment()", "global _queue _queue.append((callback, test)) def step_async(): callback = _queue.popleft() callback[0](callback[1])", "Apache, see LICENSE for more details. 
\"\"\" from collections import", "~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011 by the <NAME>, see AUTHORS for", "init_environment(): global _queue _queue = deque() def run_sync(test, callback): callback(test)", "DummyList(): def __init__(self, queue_type): self.v = [] self.queue_type = queue_type", "value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyListOutOfOrder(): def __init__(self, queue_type): self.v", "def __init__(self, queue_type): self.v = 0 self.queue_type = queue_type @gen.sync_engine", "+= (yield gen.Task(self.queue_type, value)) def test(): init_environment() dummy = Dummy(run_sync)", "dummy.test('2') dummy.test('3') run_async() # Verify value eq_(dummy.v, ['1', '2', '3'])", "dummy = DummyListOutOfOrder(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify value", "gen _queue = None def init_environment(): global _queue _queue =", "def run_async_oor(): global _queue while True: try: callback = _queue.pop()", "_queue _queue = deque() def run_sync(test, callback): callback(test) def queue_async(test,", "test(): init_environment() dummy = Dummy(run_sync) dummy.test('test') eq_(dummy.v, 'test') def test_async():", "def run_async(): global _queue while True: try: step_async() except IndexError:", "global _queue while True: try: step_async() except IndexError: break def", "from nose.tools import eq_ from tornadio2 import gen _queue =", "queue_type @gen.sync_engine def test(self, value): self.v = yield gen.Task(self.queue_type, value)", "= deque() def run_sync(test, callback): callback(test) def queue_async(test, callback): global", "class DummyLoop(): def __init__(self, queue_type): self.v = 0 self.queue_type =", "@gen.sync_engine def test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyListOutOfOrder(): def", "# Verify value eq_(dummy.v, 'test') def test_sync_queue(): init_environment() dummy =", "'test') def test_sync_queue(): init_environment() dummy = 
DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3')", "dummy = Dummy(run_sync) dummy.test('test') eq_(dummy.v, 'test') def test_async(): init_environment() dummy", "def __init__(self, queue_type): self.v = None self.queue_type = queue_type @gen.sync_engine", "dummy = Dummy(queue_async) dummy.test('test') run_async() # Verify value eq_(dummy.v, 'test')", "= _queue.pop() callback[0](callback[1]) except IndexError: break class Dummy(): def __init__(self,", "DummyLoop(): def __init__(self, queue_type): self.v = 0 self.queue_type = queue_type", "def init_environment(): global _queue _queue = deque() def run_sync(test, callback):", "test(self, value): self.v = yield gen.Task(self.queue_type, value) class DummyList(): def", "self.queue_type = queue_type @gen.sync_engine def test(self, value): self.v.append((yield gen.Task(self.queue_type, value)))", "import eq_ from tornadio2 import gen _queue = None def", "queue_type @gen.engine def test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyLoop():", "= DummyListOutOfOrder(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify value eq_(dummy.v,", "_queue while True: try: callback = _queue.pop() callback[0](callback[1]) except IndexError:", "global _queue while True: try: callback = _queue.pop() callback[0](callback[1]) except", "'2', '3']) def test_async_queue_oor(): init_environment() dummy = DummyListOutOfOrder(queue_async) dummy.test('1') dummy.test('2')", "class DummyListOutOfOrder(): def __init__(self, queue_type): self.v = [] self.queue_type =", "= queue_type @gen.sync_engine def test(self, value): self.v = yield gen.Task(self.queue_type,", "def __init__(self, queue_type): self.v = [] self.queue_type = queue_type @gen.engine", "def __init__(self, queue_type): self.v = [] self.queue_type = queue_type @gen.sync_engine", "test_async_queue_oor(): init_environment() dummy = DummyListOutOfOrder(queue_async) dummy.test('1') dummy.test('2') 
dummy.test('3') run_async_oor() #", "gen.Task(self.queue_type, value))) class DummyListOutOfOrder(): def __init__(self, queue_type): self.v = []", "deque from nose.tools import eq_ from tornadio2 import gen _queue", "= 0 self.queue_type = queue_type @gen.sync_engine def test(self, value): for", "Dummy(): def __init__(self, queue_type): self.v = None self.queue_type = queue_type", "self.queue_type = queue_type @gen.engine def test(self, value): self.v.append((yield gen.Task(self.queue_type, value)))", "_queue = deque() def run_sync(test, callback): callback(test) def queue_async(test, callback):", "@gen.sync_engine def test(self, value): self.v = yield gen.Task(self.queue_type, value) class", "gen.Task(self.queue_type, value) class DummyList(): def __init__(self, queue_type): self.v = []", "True: try: callback = _queue.pop() callback[0](callback[1]) except IndexError: break class", "self.v = yield gen.Task(self.queue_type, value) class DummyList(): def __init__(self, queue_type):", "def test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyLoop(): def __init__(self,", "details. :license: Apache, see LICENSE for more details. 
\"\"\" from", "self.v += (yield gen.Task(self.queue_type, value)) def test(): init_environment() dummy =", "= None self.queue_type = queue_type @gen.sync_engine def test(self, value): self.v", "queue_type): self.v = None self.queue_type = queue_type @gen.sync_engine def test(self,", "tornadio2 import gen _queue = None def init_environment(): global _queue", "def test_async(): init_environment() dummy = Dummy(queue_async) dummy.test('test') run_async() # Verify", "from collections import deque from nose.tools import eq_ from tornadio2", "DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async() # Verify value eq_(dummy.v, ['1',", "def test(self, value): for n in range(2): self.v += (yield", "self.v = None self.queue_type = queue_type @gen.sync_engine def test(self, value):", "= queue_type @gen.sync_engine def test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class", "init_environment() dummy = Dummy(run_sync) dummy.test('test') eq_(dummy.v, 'test') def test_async(): init_environment()", "init_environment() dummy = DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify", "dummy.test('3') run_async() # Verify value eq_(dummy.v, ['1', '2', '3']) def", "Verify value eq_(dummy.v, 'test') def test_sync_queue(): init_environment() dummy = DummyList(queue_async)", "= [] self.queue_type = queue_type @gen.engine def test(self, value): self.v.append((yield", "(yield gen.Task(self.queue_type, value)) def test(): init_environment() dummy = Dummy(run_sync) dummy.test('test')", "None def init_environment(): global _queue _queue = deque() def run_sync(test,", "__init__(self, queue_type): self.v = [] self.queue_type = queue_type @gen.sync_engine def", "break def run_async_oor(): global _queue while True: try: callback =", "None self.queue_type = queue_type @gen.sync_engine def test(self, value): self.v =", "eq_(dummy.v, 'test') def test_sync_queue(): init_environment() dummy = 
DummyList(queue_async) dummy.test('1') dummy.test('2')", "queue_async(test, callback): global _queue _queue.append((callback, test)) def step_async(): callback =", "init_environment() dummy = DummyList(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async() # Verify", ":copyright: (c) 2011 by the <NAME>, see AUTHORS for more", "# -*- coding: utf-8 -*- \"\"\" tornadio2.tests.gen ~~~~~~~~~~~~~~~~~~~ :copyright: (c)", "utf-8 -*- \"\"\" tornadio2.tests.gen ~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011 by the", "gen.Task(self.queue_type, value))) class DummyLoop(): def __init__(self, queue_type): self.v = 0", "['1', '2', '3']) def test_async_queue_oor(): init_environment() dummy = DummyListOutOfOrder(queue_async) dummy.test('1')", "test)) def step_async(): callback = _queue.popleft() callback[0](callback[1]) def run_async(): global", "= queue_type @gen.sync_engine def test(self, value): for n in range(2):", "value) class DummyList(): def __init__(self, queue_type): self.v = [] self.queue_type", "init_environment() dummy = DummyListOutOfOrder(queue_async) dummy.test('1') dummy.test('2') dummy.test('3') run_async_oor() # Verify", "queue_type): self.v = [] self.queue_type = queue_type @gen.sync_engine def test(self,", "@gen.engine def test(self, value): self.v.append((yield gen.Task(self.queue_type, value))) class DummyLoop(): def", "by the <NAME>, see AUTHORS for more details. :license: Apache,", "except IndexError: break def run_async_oor(): global _queue while True: try:", "\"\"\" tornadio2.tests.gen ~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011 by the <NAME>, see", "try: callback = _queue.pop() callback[0](callback[1]) except IndexError: break class Dummy():", "value))) class DummyListOutOfOrder(): def __init__(self, queue_type): self.v = [] self.queue_type" ]
[ "(numpy array, output of ANTs pieline) \"\"\" t1_v = normalize(t1_v,", "import FCDenseNet57 self.Mnclasses = 4 self.MNET2D = FCDenseNet57(self.Mnclasses) ckpt =", "=================================\") self.BNET3Dnet.eval() self.BNET3Dnet = self.BNET3Dnet.to(device) #======================================================================================== # Tir3D model................... from", "for z in (range(z_min, z_max - prediction_size, prediction_size)): high =", "low[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt] #", "transformed_array.to(self.device) outs = torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy() outs = np.swapaxes(generated_output,1, 2) return outs", "hl_pad-x), max(0, hl_pad-x) + vxt - vxf tyf, tyt =", "data in brats format other with any random format step", "finds the brain, Whole tumor region t1_v = t1 volume", "vzf low[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt]", "classification Dual Path way network 3. 
MNet2D 57 layered convolutional", "t1 = nib.load(os.path.join(path, name + 't1.nii.gz')).get_data() t1ce = nib.load(os.path.join(path, name", "# ========================================================================= low1[0] = [resize(low[0, i, :, :, :], (resize_to,", "device = \"cpu\" map_location = device #======================================================================================== ckpt_tir2D = os.path.join(home,", "+ 't2.nii.gz')).get_data() affine= nib.load(os.path.join(path, name + 'flair.nii.gz')).affine print (\"[INFO: DeepBrainSeg]", "tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 3, txf:txt, tyf:tyt, tzf:tzt]", "t1 volume (numpy array) t1c_v = t1c volume (numpy array)", "1], low1[0, 2], low1[0, 3] = low1[0, 0] + flair[0,0,0],", "x_max - prediction_size, prediction_size)): for y in (range(y_min, y_max -", "y:y+N, z:z+N] high[0, 3, :, :, :] = t1ce[x:x+N, y:y+N,", "= brain, whole tumor mask (numpy array, output of ANTs", "= max(0, x-ll_pad), min(shape[0], x+lr_pad) vyf, vyt = max(0, y-ll_pad),", "+ 16 resize_to = int(prediction_size ** 0.5) + 16 low_res_size", "# prediction functions..................... 
bin_path = os.path.join('/opt/ANTs/bin/') class tumorSeg(): \"\"\" class", "networks \"\"\" def __init__(self, quick = False, ants_path = bin_path):", "os.system(self.ants_path +'CopyImageHeaderInformation '+ t1_path+' '+ mask_path +' '+ mask_path +'", "self.BNET3Dnet.load_state_dict(ckpt['state_dict']) print (\"=================================== KAMNET3D Loaded =================================\") self.BNET3Dnet.eval() self.BNET3Dnet = self.BNET3Dnet.to(device)", "save_volume(final_pred, affine, os.path.join(save_path, 'DeepBrainSeg_Prediction')) return final_pred def get_segmentation_brats(self, path, save", "= True): \"\"\" Generates segmentation for the data in BraTs", "2], high[0, 3] = high[0, 0] + flair[0,0,0], high[0, 1]", "= device self.quick = quick self.ants_path = ants_path def get_ants_mask(self,", "= convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_DualPath(self, t1, t1ce, t2, flair,", "t1ce, t2, flair, brain_mask) # mask = np.swapaxes(mask,1, 0) if", "low[0, 2] + t1[0,0,0], low[0, 2] + t1ce[0,0,0] low1[0, 0],", "## neccessary if batch size == 1 transformed_array = transformed_array.to(self.device)", "= FCDenseNet57(self.T3Dnclasses) ckpt = torch.load(ckpt_tir3D, map_location=map_location) self.Tir3Dnet.load_state_dict(ckpt['state_dict']) print (\"================================== TIRNET2D", "if torch.cuda.is_available() else \"cpu\") # device = \"cpu\" map_location =", "t2, flair, brain_mask) # mask = np.swapaxes(mask,1, 0) if not", "np.zeros((flair_slice.shape[0],flair_slice.shape[1],3)) array[:,:,0] = flair_slice array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice", "= expanduser(\"~\") #======================================================================================== # prediction functions..................... 
bin_path = os.path.join('/opt/ANTs/bin/') class", "pred_size=prediction_size).detach().cpu()) pred = pred.numpy() final_prediction[:, x:x+prediction_size, y:y+prediction_size, z:z+prediction_size] = pred[0]", "class classification 4. Tir3Dnet 57 layered 3D convolutional network for", "3 self.ABLnet = FCDenseNet103(n_classes = self.ABLnclasses) ## intialize the graph", "# ========================================================================= vxf, vxt = max(0, x-hl_pad), min(shape[0], x+hr_pad) vyf,", "of patient data. to main platform for segmentation mask estimation", "FCDenseNet103(n_classes = self.ABLnclasses) ## intialize the graph saved_parms=torch.load(ckpt_ABL, map_location=map_location) self.ABLnet.load_state_dict(saved_parms['state_dict'])", "outs = np.swapaxes(generated_output,1, 2) return outs def get_segmentation(self, t1_path, t2_path,", "y_min, y_max, z_min, z_max = x_min, min(shape[0] - N, x_max),", "self.ants_path = ants_path def get_ants_mask(self, t1_path): \"\"\" We make use", "returns : segmentation mask \"\"\" name = path.split(\"/\")[-1] + \"_\"", ":, :] = t1ce[x:x+N, y:y+N, z:z+N] high = Variable(torch.from_numpy(high)).to(self.device).float() pred", "4, high_res_size, high_res_size, high_res_size)) low = np.zeros((1, 4, low_res_size, low_res_size,", "= pred.numpy() final_prediction[:, x:x+prediction_size, y:y+prediction_size, z:z+prediction_size] = pred[0] final_prediction =", "flair_v, brain_mask): \"\"\" ABLnetwork output, finds the brain, Whole tumor", "(\" + strftime(\"%a, %d %b %Y %H:%M:%S +0000\", gmtime()) +", "obtained by aspect ratio calculation high_res_size = prediction_size + 16", "N, x_max), y_min, min(shape[1] - N, y_max), z_min, min(shape[2] -", "final_prediction def inner_class_classification_with_logits_2D(self, t1ce_volume, t2_volume, flair_volume): \"\"\" output of 2D", "the mask in the same location as t1 data directory", "'.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar') ckpt_tir3D = os.path.join(home, 
'.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar') ckpt_BNET3D = os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar') ckpt_ABL", "of ants framework for generalized skull stripping t1_path: t1 volume", "0.456, 0.406], [0.229, 0.224, 0.225]) transformList = [] transformList.append(transforms.ToTensor()) transformList.append(normalize)", "def get_segmentation(self, t1_path, t2_path, t1ce_path, flair_path, save_path = None): \"\"\"", "resize from torchvision import transforms from time import gmtime, strftime", "= t1.shape # to exclude batch_size final_prediction = np.zeros((self.T3Dnclasses, shape[0],", "layered 3D convolutional network for inner class classification more on", "= \"cpu\" map_location = device #======================================================================================== ckpt_tir2D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar')", "volume (numpy array) t1c_v = t1c volume (numpy array) t2_v", "more on training details and network information: (https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>) ========================= quick:", "3D tiramisu model (tir3Dnet) mask = numpy array output of", "1') os.system(self.ants_path +'ImageMath 3 '+ mask_path +' ME '+ mask_path", "tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 2, txf:txt, tyf:tyt, tzf:tzt]", "= FCDenseNet103(n_classes = self.ABLnclasses) ## intialize the graph saved_parms=torch.load(ckpt_ABL, map_location=map_location)", "= max(0, z-ll_pad), min(shape[2], z+lr_pad) txf, txt = max(0, ll_pad-x),", "= self.get_ants_mask(t2_path) mask = self.get_localization(t1, t1ce, t2, flair, brain_mask) #", "for the patient data in brats format other with any", "volume (numpy array) brain_mask = brain, whole tumor mask (numpy", "- vyf tzf, tzt = max(0, ll_pad-z), max(0, ll_pad-z) +", "= max(0, x-hl_pad), min(shape[0], x+hr_pad) vyf, vyt = max(0, y-hl_pad),", "maskvolume (numpy uint8 type) \"\"\" mask_path = 
os.path.join(os.path.dirname(t1_path), 'mask.nii.gz') os.system(self.ants_path", "utf-8 -*- # # author: <NAME> # contact: <EMAIL> import", "= np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2])) for slices in tqdm(range(flair_volume.shape[2])): flair_slice = scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices])) t2_slice", "%Y %H:%M:%S +0000\", gmtime()) + \") Working on: \", path)", "vxf, vxt = max(0, x-ll_pad), min(shape[0], x+lr_pad) vyf, vyt =", "= numpy array output of ABLnet N = patch size", "tumor region t1_v = t1 volume (numpy array) t1c_v =", "fill the model with trained params print (\"=================================== ABLNET2D Loaded", "Variable(torch.from_numpy(low1)).to(self.device).float() pred = torch.nn.functional.softmax(self.BNET3Dnet(high, low1, pred_size=prediction_size).detach().cpu()) pred = pred.numpy() final_prediction[:,", "'+ mask_path +' 1') os.system(self.ants_path +'CopyImageHeaderInformation '+ t1_path+' '+ mask_path", "y-ll_pad), min(shape[1], y+lr_pad) vzf, vzt = max(0, z-ll_pad), min(shape[2], z+lr_pad)", "txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 3, txf:txt,", "flair, brain_mask) mask = np.swapaxes(mask,1, 0) if not self.quick: final_predictionTir3D_logits", "1], high[0, 2], high[0, 3] = high[0, 0] + flair[0,0,0],", "N, N)) high[0, 0, :, :, :] = flair[x:x+N, y:y+N,", "= low[0, 0] + flair[0,0,0], low[0, 1] + t2[0,0,0], low[0,", "y:y+N, z:z+N] high[0, 2, :, :, :] = t1[x:x+N, y:y+N,", "tqdm(range(flair_volume.shape[2])): flair_slice = scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices])) t2_slice = scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices])) t1ce_slice = scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices]))", "mask \"\"\" name = path.split(\"/\")[-1] + \"_\" flair = nib.load(os.path.join(path,", "self.get_localization(t1, t1ce, t2, flair, brain_mask) # mask = np.swapaxes(mask,1, 
0)", "step followed for in estimation of segmentation mask 1. ABLnet", "- vyf tzf, tzt = max(0, hl_pad-z), max(0, hl_pad-z) +", "= np.zeros((flair_slice.shape[0],flair_slice.shape[1],3)) array[:,:,0] = flair_slice array[:,:,1] = t2_slice array[:,:,2] =", "= perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save_path: os.makedirs(save_path, exist_ok=True) save_volume(final_pred,", "from skimage.transform import resize from torchvision import transforms from time", "= np.zeros((1, 4, resize_to, resize_to, resize_to)) high[0, 0], high[0, 1],", "= 5 self.BNET3Dnet = BrainNet_3D_Inception() ckpt = torch.load(ckpt_BNET3D, map_location=map_location) self.BNET3Dnet.load_state_dict(ckpt['state_dict'])", "import Variable from skimage.transform import resize from torchvision import transforms", "# ========================================================================= vxf, vxt = max(0, x-ll_pad), min(shape[0], x+lr_pad) vyf,", "= t1.shape # to exclude batch_size final_prediction = np.zeros((self.B3Dnclasses, shape[0],", "inner_class_classification_with_logits_DualPath(self, t1, t1ce, t2, flair, brain_mask, mask=None, prediction_size = 9):", "classification more on training details and network information: (https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>) =========================", "57 layered 3D convolutional network for inner class classification more", "low1[0, 0] + flair[0,0,0], low1[0, 1] + t2[0,0,0], low1[0, 2]", "import BrainNet_3D_Inception self.B3Dnclasses = 5 self.BNET3Dnet = BrainNet_3D_Inception() ckpt =", "tzf, tzt = max(0, ll_pad-z), max(0, ll_pad-z) + vzt -", "max(0, z-ll_pad), min(shape[2], z+lr_pad) txf, txt = max(0, ll_pad-x), max(0,", "torch.load(ckpt_tir2D, map_location=map_location) self.MNET2D.load_state_dict(ckpt['state_dict']) print (\"=================================== MNET2D Loaded ===================================\") self.MNET2D.eval() self.MNET2D", "logits = 
self.ABLnet(transformed_array).detach().cpu().numpy()# 3 x 240 x 240 generated_output_logits[:,:,:, slices]", "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") # device =", "t2, flair) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits =", "pred.data.numpy() final_prediction[:, x:x+N, y:y+N, z:z+N] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction)", "vzf:vzt] low[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt]", "t1ce, t2, flair, brain_mask, mask) final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2,", "= [] transformList.append(transforms.ToTensor()) transformList.append(normalize) transformSequence=transforms.Compose(transformList) generated_output = np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2])) for slices", "None): \"\"\" Generates segmentation for the data not in brats", "import SimpleITK as sitk import numpy as np import nibabel", "tumorSeg(): \"\"\" class performs segmentation for a given sequence of", "brain_mask): \"\"\" ABLnetwork output, finds the brain, Whole tumor region", "(\"[INFO: DeepBrainSeg] (\" + strftime(\"%a, %d %b %Y %H:%M:%S +0000\",", "= torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu()) pred = pred.data.numpy() final_prediction[:, x:x+N, y:y+N, z:z+N] =", "pieline) \"\"\" t1_v = normalize(t1_v, brain_mask) t1c_v = normalize(t1c_v, brain_mask)", "class performs segmentation for a given sequence of patient data.", "segmentation mask 1. 
ABLnet for reducing false positives outside the", "tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= low1[0] = [resize(low[0,", "[] transformList.append(transforms.ToTensor()) transformList.append(normalize) transformSequence=transforms.Compose(transformList) generated_output = np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2])) for slices in", "16 resize_to = int(prediction_size ** 0.5) + 16 low_res_size =", "by aspect ratio calculation high_res_size = prediction_size + 16 resize_to", "prediction_size, prediction_size)): for z in (range(z_min, z_max - prediction_size, prediction_size)):", "device #======================================================================================== ckpt_tir2D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar') ckpt_tir3D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar')", "= torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy() outs = np.swapaxes(generated_output,1, 2) return outs def get_segmentation(self,", "flair, brain_mask) # mask = np.swapaxes(mask,1, 0) if not self.quick:", "= self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2, flair, brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1,", "mask_path +' 1') os.system(self.ants_path +'ImageMath 3 '+ mask_path +' ME", "final_pred = adjust_classes(final_pred) if save: save_volume(final_pred, affine, os.path.join(path, 'DeepBrainSeg_Prediction')) return", "segmentation.............. 
from .models.modelABL import FCDenseNet103 self.ABLnclasses = 3 self.ABLnet =", "flair[0,0,0], high[0, 1] + t2[0,0,0], high[0, 2] + t1[0,0,0], high[0,", "= normalize(t1, brain_mask) t1ce = normalize(t1ce, brain_mask) t2 = normalize(t2,", "vxt = max(0, x-hl_pad), min(shape[0], x+hr_pad) vyf, vyt = max(0,", "x-hl_pad), min(shape[0], x+hr_pad) vyf, vyt = max(0, y-hl_pad), min(shape[1], y+hr_pad)", "1 transformed_array = transformed_array.to(self.device) logits = self.ABLnet(transformed_array).detach().cpu().numpy()# 3 x 240", "Dual path network (BNet3D) else copmutes an ensumble over all", "t1_slice = np.transpose(t1_v[:,:,slices]) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],4)) array[:,:,0] = flair_slice array[:,:,1]", "self.quick: final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1, t1ce, t2, flair, brain_mask, mask) final_predictionBNET3D_logits", "nib.load(flair_path).get_data() affine = nib.load(flair_path).affine brain_mask = self.get_ants_mask(t2_path) mask = self.get_localization(t1,", "max(0, ll_pad-z), max(0, ll_pad-z) + vzt - vzf low[0, 0,", "array, output of ANTs pieline) \"\"\" t1_v = normalize(t1_v, brain_mask)", "on Dual path network (BNet3D) else copmutes an ensumble over", "path network (BNet3D) else copmutes an ensumble over all four", "nib.load(t1ce_path).get_data() flair = nib.load(flair_path).get_data() affine = nib.load(flair_path).affine brain_mask = self.get_ants_mask(t2_path)", "tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= vxf, vxt", "z-ll_pad), min(shape[2], z+lr_pad) txf, txt = max(0, ll_pad-x), max(0, ll_pad-x)", "1], low[0, 2], low[0, 3] = low[0, 0] + flair[0,0,0],", "brain_mask) shape = t1.shape # to exclude batch_size final_prediction =", "= perform_postprocessing(final_pred) final_pred = adjust_classes_air_brain_tumour(np.uint8(final_pred)) return np.uint8(final_pred) def 
inner_class_classification_with_logits_NCube(self, t1,", "y_min, min(shape[1] - N, y_max), z_min, min(shape[2] - N, z_max)", "for z in range(z_min, z_max, N//2): high = np.zeros((1, 4,", ":], (resize_to, resize_to, resize_to)) for i in range(4)] high =", "txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= low1[0]", "brain_mask, mask) final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2, flair, brain_mask, mask)", "pdb import os from ..helpers.helper import * from os.path import", "+ t1[0,0,0], low1[0, 2] + t1ce[0,0,0] # ========================================================================= vxf, vxt", "vzf, vzt = max(0, z-hl_pad), min(shape[2], z+hr_pad) txf, txt =", "inner class classification more on training details and network information:", "4, low_res_size, low_res_size, low_res_size)) low1 = np.zeros((1, 4, resize_to, resize_to,", "= bin_path): device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") #", "flair[x:x+N, y:y+N, z:z+N] high[0, 1, :, :, :] = t2[x:x+N,", "returns: maskvolume (numpy uint8 type) \"\"\" mask_path = os.path.join(os.path.dirname(t1_path), 'mask.nii.gz')", "brain_mask) flair_v = normalize(flair_v, brain_mask) generated_output_logits = np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2])) for", "2] + t1[0,0,0], low1[0, 2] + t1ce[0,0,0] # ========================================================================= vxf,", "(numpy array) t2_v = t2 volume (numpy array) flair_v =", "self.ABLnclasses) ## intialize the graph saved_parms=torch.load(ckpt_ABL, map_location=map_location) self.ABLnet.load_state_dict(saved_parms['state_dict']) ## fill", "t1ce_slice array[:,:,3] = t1_slice transformed_array = torch.from_numpy(convert_image(array)).float() transformed_array = transformed_array.unsqueeze(0)", "patch size during inference \"\"\" t1 = normalize(t1, brain_mask) 
t1ce", "vzf, vzt = max(0, z-ll_pad), min(shape[2], z+lr_pad) txf, txt =", "return final_prediction def inner_class_classification_with_logits_DualPath(self, t1, t1ce, t2, flair, brain_mask, mask=None,", "+'ImageMath 3 '+ mask_path +' ME '+ mask_path +' 1')", "0] + flair[0,0,0], low1[0, 1] + t2[0,0,0], low1[0, 2] +", "1, :, :, :] = t2[x:x+N, y:y+N, z:z+N] high[0, 2,", "9): \"\"\" output of BNet3D prediction_size = mid inference patch", "from torch.autograd import Variable from skimage.transform import resize from torchvision", "t2_slice = np.transpose(t2_v[:,:,slices]) t1ce_slice = np.transpose(t1c_v[:,:,slices]) t1_slice = np.transpose(t1_v[:,:,slices]) array", "pred = torch.nn.functional.softmax(self.BNET3Dnet(high, low1, pred_size=prediction_size).detach().cpu()) pred = pred.numpy() final_prediction[:, x:x+prediction_size,", "model with trained params print (\"=================================== ABLNET2D Loaded =================================\") self.ABLnet.eval()", "prediction_size + 16 resize_to = int(prediction_size ** 0.5) + 16", "= t1ce_slice array = np.uint8(array) transformed_array = transformSequence(array) transformed_array =", "network information: (https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>) ========================= quick: True (just evaluates on Dual", "'+ mask_path +' '+ mask_path +' 0.01 1') os.system(self.ants_path +'ImageMath", "normalize(t1_v, brain_mask) t1c_v = normalize(t1c_v, brain_mask) t2_v = normalize(t2_v, brain_mask)", "= t2[x:x+N, y:y+N, z:z+N] high[0, 2, :, :, :] =", ".models.modelABL import FCDenseNet103 self.ABLnclasses = 3 self.ABLnet = FCDenseNet103(n_classes =", "= self.inner_class_classification_with_logits_2D(t1, t2, flair).transpose(0, 2, 1, 3) final_prediction_array = np.array([final_predictionTir3D_logits,", "min(shape[0] - N, x_max), y_min, min(shape[1] - N, y_max), z_min,", "=================================\") self.ABLnet.eval() self.ABLnet = self.ABLnet.to(device) 
#======================================================================================== # Tir2D net....................... from", "final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2,", "name + 'flair.nii.gz')).get_data() t1 = nib.load(os.path.join(path, name + 't1.nii.gz')).get_data() t1ce", "t1c_v = t1c volume (numpy array) t2_v = t2 volume", "%d %b %Y %H:%M:%S +0000\", gmtime()) + \") Working on:", "high = Variable(torch.from_numpy(high)).to(self.device).float() low1 = Variable(torch.from_numpy(low1)).to(self.device).float() pred = torch.nn.functional.softmax(self.BNET3Dnet(high, low1,", "bbox(mask, pad = N) x_min, x_max, y_min, y_max, z_min, z_max", "final_pred) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save: save_volume(final_pred,", "= (high_res_size - prediction_size)//2 hr_pad = hl_pad + prediction_size ll_pad", "max(0, ll_pad-z) + vzt - vzf low[0, 0, txf:txt, tyf:tyt,", "+ flair[0,0,0], low1[0, 1] + t2[0,0,0], low1[0, 2] + t1[0,0,0],", "np.zeros((1, 4, low_res_size, low_res_size, low_res_size)) low1 = np.zeros((1, 4, resize_to,", "\"\"\" We make use of ants framework for generalized skull", "'+ t1_path+' '+ mask_path +' '+ mask_path +' 1 1", "- vzf high[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt,", "t2_volume, flair_volume): \"\"\" output of 2D tiramisu model (MNet) \"\"\"", "in the provided directory returns: segmentation mask \"\"\" t1 =", "z_max) with torch.no_grad(): for x in tqdm(range(x_min, x_max, N//2)): for", "<NAME> # contact: <EMAIL> import torch import SimpleITK as sitk", "as nib from torch.autograd import Variable from skimage.transform import resize", "Air Brain Lesson model (2D model, 103 layered) 2. 
BNet3Dnet", "for segmentation mask estimation one for the patient data in", "tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 3, txf:txt, tyf:tyt,", "(numpy array) brain_mask = brain, whole tumor mask (numpy array,", "of 3D tiramisu model (tir3Dnet) mask = numpy array output", "'+ mask_path +' '+ mask_path +' 1 1 1') mask", "'+ mask_path +' 1 1 1') mask = np.uint8(nib.load(mask_path).get_data()) return", "t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= vxf, vxt = max(0, x-ll_pad),", "ckpt_BNET3D = os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar') ckpt_ABL = os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar') #======================================================================================== #", "quick: # BrainNet3D model...................... from .models.model3DBNET import BrainNet_3D_Inception self.B3Dnclasses =", "1') mask = np.uint8(nib.load(mask_path).get_data()) return mask def get_localization(self, t1_v, t1c_v,", "neccessary if batch size == 1 transformed_array = transformed_array.to(self.device) logits", "os.makedirs(save_path, exist_ok=True) save_volume(final_pred, affine, os.path.join(save_path, 'DeepBrainSeg_Prediction')) return final_pred def get_segmentation_brats(self,", "self.MNET2D.load_state_dict(ckpt['state_dict']) print (\"=================================== MNET2D Loaded ===================================\") self.MNET2D.eval() self.MNET2D = self.MNET2D.to(device)", "slices in tqdm(range(flair_v.shape[2])): flair_slice = np.transpose(flair_v[:,:,slices]) t2_slice = np.transpose(t2_v[:,:,slices]) t1ce_slice", "1') os.system(self.ants_path +'ImageMath 3 '+ mask_path +' MD '+ mask_path", "y+hr_pad) vzf, vzt = max(0, z-hl_pad), min(shape[2], z+hr_pad) txf, txt", "self.Mnclasses = 4 self.MNET2D = FCDenseNet57(self.Mnclasses) ckpt = torch.load(ckpt_tir2D, map_location=map_location)", "prediction 
functions..................... bin_path = os.path.join('/opt/ANTs/bin/') class tumorSeg(): \"\"\" class performs", "t2[0,0,0], low1[0, 2] + t1[0,0,0], low1[0, 2] + t1ce[0,0,0] #", "final_prediction = np.zeros((self.T3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max, y_min, y_max,", "if save_path provided function saves the prediction with DeepBrainSeg_Prediction.nii.qz name", "segmentation mask \"\"\" name = path.split(\"/\")[-1] + \"_\" flair =", "vyf, vyt = max(0, y-ll_pad), min(shape[1], y+lr_pad) vzf, vzt =", "vyf tzf, tzt = max(0, hl_pad-z), max(0, hl_pad-z) + vzt", "return np.uint8(final_pred) def inner_class_classification_with_logits_NCube(self, t1, t1ce, t2, flair, brain_mask, mask,", "import torch import SimpleITK as sitk import numpy as np", "nib.load(t1_path).get_data() t2 = nib.load(t2_path).get_data() t1ce = nib.load(t1ce_path).get_data() flair = nib.load(flair_path).get_data()", "N = 64): \"\"\" output of 3D tiramisu model (tir3Dnet)", "whole tumor mask (numpy array, output of ANTs pieline) \"\"\"", "array = np.uint8(array) transformed_array = transformSequence(array) transformed_array = transformed_array.unsqueeze(0) transformed_array", "for y in (range(y_min, y_max - prediction_size, prediction_size)): for z", "t2[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt,", "transformed_array = transformed_array.to(self.device) logits = self.ABLnet(transformed_array).detach().cpu().numpy()# 3 x 240 x", "format if save_path provided function saves the prediction with DeepBrainSeg_Prediction.nii.qz", "the data in BraTs format if save True saves the", "os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar') ckpt_tir3D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar') ckpt_BNET3D = os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar')", "#======================================================================================== if not quick: # 
BrainNet3D model...................... from .models.model3DBNET import", "x in tqdm(range(x_min, x_max - prediction_size, prediction_size)): for y in", "tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 1, txf:txt, tyf:tyt, tzf:tzt]", "hl_pad-y), max(0, hl_pad-y) + vyt - vyf tzf, tzt =", "= torch.from_numpy(convert_image(array)).float() transformed_array = transformed_array.unsqueeze(0) ## neccessary if batch size", "skimage.transform import resize from torchvision import transforms from time import", "mask_path +' MD '+ mask_path +' 1') os.system(self.ants_path +'ImageMath 3", "type) \"\"\" mask_path = os.path.join(os.path.dirname(t1_path), 'mask.nii.gz') os.system(self.ants_path +'ImageMath 3 '+", "self.BNET3Dnet = self.BNET3Dnet.to(device) #======================================================================================== # Tir3D model................... from .models.modelTir3D import", "np.transpose(flair_v[:,:,slices]) t2_slice = np.transpose(t2_v[:,:,slices]) t1ce_slice = np.transpose(t1c_v[:,:,slices]) t1_slice = np.transpose(t1_v[:,:,slices])", "mask) final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2, flair, brain_mask, mask) final_predictionMnet_logits", "graph saved_parms=torch.load(ckpt_ABL, map_location=map_location) self.ABLnet.load_state_dict(saved_parms['state_dict']) ## fill the model with trained", "= transformed_array.to(self.device) outs = torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy() outs = np.swapaxes(generated_output,1, 2) return", "np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2])) for slices in tqdm(range(flair_volume.shape[2])): flair_slice = scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices])) t2_slice =", "= normalize(flair_v, brain_mask) generated_output_logits = np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2])) for slices in", "= mid inference patch 
size \"\"\" t1 = normalize(t1, brain_mask)", "import transforms from time import gmtime, strftime from tqdm import", "name = path.split(\"/\")[-1] + \"_\" flair = nib.load(os.path.join(path, name +", "logits.transpose(0, 1, 3, 2) final_pred = apply_argmax_to_logits(generated_output_logits) final_pred = perform_postprocessing(final_pred)", "final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array =", "x+lr_pad) vyf, vyt = max(0, y-ll_pad), min(shape[1], y+lr_pad) vzf, vzt", "mask 1. ABLnet for reducing false positives outside the brain", "= nib.load(os.path.join(path, name + 't1ce.nii.gz')).get_data() t2 = nib.load(os.path.join(path, name +", "\"_\" flair = nib.load(os.path.join(path, name + 'flair.nii.gz')).get_data() t1 = nib.load(os.path.join(path,", "max(0, y-hl_pad), min(shape[1], y+hr_pad) vzf, vzt = max(0, z-hl_pad), min(shape[2],", "in range(y_min, y_max, N//2): for z in range(z_min, z_max, N//2):", "flair_slice array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice array = np.uint8(array)", "max(0, hl_pad-x), max(0, hl_pad-x) + vxt - vxf tyf, tyt", "= np.swapaxes(generated_output,1, 2) return outs def get_segmentation(self, t1_path, t2_path, t1ce_path,", "+' Normalize '+ t1_path) os.system(self.ants_path +'ThresholdImage 3 '+ mask_path +'", "format step followed for in estimation of segmentation mask 1.", "hr_pad = hl_pad + prediction_size ll_pad = (low_res_size - prediction_size)//2", "0.225]) transformList = [] transformList.append(transforms.ToTensor()) transformList.append(normalize) transformSequence=transforms.Compose(transformList) generated_output = np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2]))", "tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 1, txf:txt, tyf:tyt,", "= False, ants_path = bin_path): device = torch.device(\"cuda:0\" if torch.cuda.is_available()", 
"high_res_size, high_res_size)) low = np.zeros((1, 4, low_res_size, low_res_size, low_res_size)) low1", "np.uint8(array) transformed_array = transformSequence(array) transformed_array = transformed_array.unsqueeze(0) transformed_array = transformed_array.to(self.device)", "2. BNet3Dnet 3D network for inner class classification Dual Path", "final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2, flair, brain_mask, mask) final_predictionMnet_logits =", "..helpers.helper import * from os.path import expanduser home = expanduser(\"~\")", "os.system(self.ants_path +'ImageMath 3 '+ mask_path +' Normalize '+ t1_path) os.system(self.ants_path", "inner_class_classification_with_logits_2D(self, t1ce_volume, t2_volume, flair_volume): \"\"\" output of 2D tiramisu model", "min(shape[0], x+lr_pad) vyf, vyt = max(0, y-ll_pad), min(shape[1], y+lr_pad) vzf,", "self.device = device self.quick = quick self.ants_path = ants_path def", "N//2): high = np.zeros((1, 4, N, N, N)) high[0, 0,", "'flair.nii.gz')).get_data() t1 = nib.load(os.path.join(path, name + 't1.nii.gz')).get_data() t1ce = nib.load(os.path.join(path,", "high[0, 1, :, :, :] = t2[x:x+N, y:y+N, z:z+N] high[0,", "for inner class classification 4. 
Tir3Dnet 57 layered 3D convolutional", "2], low1[0, 3] = low1[0, 0] + flair[0,0,0], low1[0, 1]", "(range(z_min, z_max - prediction_size, prediction_size)): high = np.zeros((1, 4, high_res_size,", "self.Tir3Dnet.eval() self.Tir3Dnet = self.Tir3Dnet.to(device) #======================================================================================== self.device = device self.quick =", "# contact: <EMAIL> import torch import SimpleITK as sitk import", "(https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>) ========================= quick: True (just evaluates on Dual path network", "final_pred def get_segmentation_brats(self, path, save = True): \"\"\" Generates segmentation", "array) flair_v = flair volume (numpy array) brain_mask = brain,", "pred = torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu()) pred = pred.data.numpy() final_prediction[:, x:x+N, y:y+N, z:z+N]", "vxt = max(0, x-ll_pad), min(shape[0], x+lr_pad) vyf, vyt = max(0,", "prediction_size)//2 hr_pad = hl_pad + prediction_size ll_pad = (low_res_size -", "save_volume(final_pred, affine, os.path.join(path, 'DeepBrainSeg_Prediction')) return final_pred # ======================================================================================== if __name__", "output of 2D tiramisu model (MNet) \"\"\" normalize = transforms.Normalize([0.485,", "ratio calculation high_res_size = prediction_size + 16 resize_to = int(prediction_size", "np.uint8(final_pred) def inner_class_classification_with_logits_NCube(self, t1, t1ce, t2, flair, brain_mask, mask, N", "txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 3, txf:txt,", "on training details and network information: (https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>) ========================= quick: True", "segmentation for the data not in brats format if save_path", "slices] = logits.transpose(0, 1, 3, 2) final_pred = apply_argmax_to_logits(generated_output_logits) final_pred", "self.quick = quick 
self.ants_path = ants_path def get_ants_mask(self, t1_path): \"\"\"", "low1[0, 0], low1[0, 1], low1[0, 2], low1[0, 3] = low1[0,", "brain_mask = self.get_ants_mask(os.path.join(path, name + 't2.nii.gz')) # brain_mask = get_brain_mask(t1)", "Tir3Dnet 57 layered 3D convolutional network for inner class classification", "min(shape[0], x+hr_pad) vyf, vyt = max(0, y-hl_pad), min(shape[1], y+hr_pad) vzf,", "= np.zeros((flair_slice.shape[0],flair_slice.shape[1],4)) array[:,:,0] = flair_slice array[:,:,1] = t2_slice array[:,:,2] =", "patient data in brats format other with any random format", "= normalize(t1_v, brain_mask) t1c_v = normalize(t1c_v, brain_mask) t2_v = normalize(t2_v,", "= None): \"\"\" Generates segmentation for the data not in", "#======================================================================================== # Tir2D net....................... from .models.modelTir2D import FCDenseNet57 self.Mnclasses =", "scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices])) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],3)) array[:,:,0] = flair_slice array[:,:,1] = t2_slice", "resize_to, resize_to)) for i in range(4)] high = Variable(torch.from_numpy(high)).to(self.device).float() low1", "high_res_size, high_res_size, high_res_size)) low = np.zeros((1, 4, low_res_size, low_res_size, low_res_size))", "'+ mask_path +' MD '+ mask_path +' 1') os.system(self.ants_path +'ImageMath", "affine= nib.load(os.path.join(path, name + 'flair.nii.gz')).affine print (\"[INFO: DeepBrainSeg] (\" +", "shape = t1.shape # to exclude batch_size final_prediction = np.zeros((self.T3Dnclasses,", "Loaded =================================\") self.Tir3Dnet.eval() self.Tir3Dnet = self.Tir3Dnet.to(device) #======================================================================================== self.device = device", "= os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar') 
#======================================================================================== # air brain lesion segmentation.............. from", "for generalized skull stripping t1_path: t1 volume path (str) saves", "resize_to)) high[0, 0], high[0, 1], high[0, 2], high[0, 3] =", "t1.shape # to exclude batch_size final_prediction = np.zeros((self.T3Dnclasses, shape[0], shape[1],", "network (BNet3D) else copmutes an ensumble over all four networks", "nib from torch.autograd import Variable from skimage.transform import resize from", "= (low_res_size - prediction_size)//2 lr_pad = ll_pad + prediction_size for", "= max(0, ll_pad-x), max(0, ll_pad-x) + vxt - vxf tyf,", "= t1[x:x+N, y:y+N, z:z+N] high[0, 3, :, :, :] =", "in tqdm(range(flair_volume.shape[2])): flair_slice = scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices])) t2_slice = scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices])) t1ce_slice =", "tzt = max(0, hl_pad-z), max(0, hl_pad-z) + vzt - vzf", "Loaded =================================\") self.BNET3Dnet.eval() self.BNET3Dnet = self.BNET3Dnet.to(device) #======================================================================================== # Tir3D model...................", "= flair[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 1, txf:txt, tyf:tyt, tzf:tzt] =", "model (2D model, 103 layered) 2. BNet3Dnet 3D network for", "def inner_class_classification_with_logits_2D(self, t1ce_volume, t2_volume, flair_volume): \"\"\" output of 2D tiramisu", "= bbox(mask, pad = prediction_size) # obtained by aspect ratio", "a given sequence of patient data. 
to main platform for", "- prediction_size, prediction_size)): for y in (range(y_min, y_max - prediction_size,", "final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits])", "2] + t1ce[0,0,0] low[0, 0], low[0, 1], low[0, 2], low[0,", "torch.from_numpy(convert_image(array)).float() transformed_array = transformed_array.unsqueeze(0) ## neccessary if batch size ==", "print (\"================================== TIRNET2D Loaded =================================\") self.Tir3Dnet.eval() self.Tir3Dnet = self.Tir3Dnet.to(device) #========================================================================================", "Whole tumor region t1_v = t1 volume (numpy array) t1c_v", "= quick self.ants_path = ants_path def get_ants_mask(self, t1_path): \"\"\" We", "directory returns: maskvolume (numpy uint8 type) \"\"\" mask_path = os.path.join(os.path.dirname(t1_path),", "t1ce = nib.load(t1ce_path).get_data() flair = nib.load(flair_path).get_data() affine = nib.load(flair_path).affine brain_mask", "over all four networks \"\"\" def __init__(self, quick = False,", "vyf:vyt, vzf:vzt] low[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt,", "in brats format other with any random format step followed", "mask_path +' ME '+ mask_path +' 1') os.system(self.ants_path +'CopyImageHeaderInformation '+", "= int(51*resize_to/19) hl_pad = (high_res_size - prediction_size)//2 hr_pad = hl_pad", "z:z+N] high[0, 2, :, :, :] = t1[x:x+N, y:y+N, z:z+N]", "mask (numpy array, output of ANTs pieline) \"\"\" t1_v =", "+ 't1ce.nii.gz')).get_data() t2 = nib.load(os.path.join(path, name + 't2.nii.gz')).get_data() affine= nib.load(os.path.join(path,", "name + 't1.nii.gz')).get_data() t1ce = nib.load(os.path.join(path, name + 't1ce.nii.gz')).get_data() t2", "patients data path returns : segmentation mask \"\"\" name =", "vyf, vyt = max(0, 
y-hl_pad), min(shape[1], y+hr_pad) vzf, vzt =", "0], low1[0, 1], low1[0, 2], low1[0, 3] = low1[0, 0]", "# -*- coding: utf-8 -*- # # author: <NAME> #", "self.inner_class_classification_with_logits_2D(t1, t2, flair).transpose(0, 2, 1, 3) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits,", "the save directory in the patients data path returns :", "not self.quick: final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1, t1ce, t2, flair, brain_mask, mask)", "1] + t2[0,0,0], low1[0, 2] + t1[0,0,0], low1[0, 2] +", "'t2.nii.gz')) # brain_mask = get_brain_mask(t1) mask = self.get_localization(t1, t1ce, t2,", ":, :, :], (resize_to, resize_to, resize_to)) for i in range(4)]", "def get_ants_mask(self, t1_path): \"\"\" We make use of ants framework", "1, 3) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits =", "to main platform for segmentation mask estimation one for the", "Variable from skimage.transform import resize from torchvision import transforms from", "= scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices])) t2_slice = scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices])) t1ce_slice = scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices])) array =", "lr_pad = ll_pad + prediction_size for x in tqdm(range(x_min, x_max", "low[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] low[0,", ":] = t2[x:x+N, y:y+N, z:z+N] high[0, 2, :, :, :]", "t1c_v, t2_v, flair_v, brain_mask): \"\"\" ABLnetwork output, finds the brain,", "author: <NAME> # contact: <EMAIL> import torch import SimpleITK as", "import tqdm import pdb import os from ..helpers.helper import *", "64): \"\"\" output of 3D tiramisu model (tir3Dnet) mask =", "z in (range(z_min, z_max - prediction_size, prediction_size)): high = np.zeros((1,", "t1_slice transformed_array 
= torch.from_numpy(convert_image(array)).float() transformed_array = transformed_array.unsqueeze(0) ## neccessary if", "max(0, x-ll_pad), min(shape[0], x+lr_pad) vyf, vyt = max(0, y-ll_pad), min(shape[1],", "low[0, 2] + t1ce[0,0,0] low1[0, 0], low1[0, 1], low1[0, 2],", "= os.path.join(os.path.dirname(t1_path), 'mask.nii.gz') os.system(self.ants_path +'ImageMath 3 '+ mask_path +' Normalize", "max(0, hl_pad-y) + vyt - vyf tzf, tzt = max(0,", "3] = low[0, 0] + flair[0,0,0], low[0, 1] + t2[0,0,0],", "max(0, hl_pad-z) + vzt - vzf high[0, 0, txf:txt, tyf:tyt,", "hl_pad + prediction_size ll_pad = (low_res_size - prediction_size)//2 lr_pad =", "python # -*- coding: utf-8 -*- # # author: <NAME>", "volume (numpy array) flair_v = flair volume (numpy array) brain_mask", "= 9): \"\"\" output of BNet3D prediction_size = mid inference", "low1, pred_size=prediction_size).detach().cpu()) pred = pred.numpy() final_prediction[:, x:x+prediction_size, y:y+prediction_size, z:z+prediction_size] =", "t1c volume (numpy array) t2_v = t2 volume (numpy array)", "brain_mask) flair = normalize(flair, brain_mask) shape = t1.shape # to", "t2_slice = scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices])) t1ce_slice = scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices])) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],3)) array[:,:,0]", "name + 't1ce.nii.gz')).get_data() t2 = nib.load(os.path.join(path, name + 't2.nii.gz')).get_data() affine=", "= np.uint8(nib.load(mask_path).get_data()) return mask def get_localization(self, t1_v, t1c_v, t2_v, flair_v,", "t1_path: t1 volume path (str) saves the mask in the", "z_max, N//2): high = np.zeros((1, 4, N, N, N)) high[0,", "low[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] low[0,", "= [resize(low[0, i, :, :, :], (resize_to, resize_to, resize_to)) for", "= self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionMnet_logits]) 
final_prediction_logits = combine_logits_AM(final_prediction_array)", "t2 = normalize(t2, brain_mask) flair = normalize(flair, brain_mask) shape =", "Loaded ===================================\") self.MNET2D.eval() self.MNET2D = self.MNET2D.to(device) #======================================================================================== if not quick:", "np.array([final_predictionMnet_logits]) final_prediction_logits = combine_logits_AM(final_prediction_array) final_pred = postprocessing_pydensecrf(final_prediction_logits) final_pred = combine_mask_prediction(mask,", "= np.zeros((1, 4, high_res_size, high_res_size, high_res_size)) low = np.zeros((1, 4,", "x_min, min(shape[0] - N, x_max), y_min, min(shape[1] - N, y_max),", "transformed_array = transformed_array.to(self.device) outs = torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy() outs = np.swapaxes(generated_output,1, 2)", "for y in range(y_min, y_max, N//2): for z in range(z_min,", "low1 = Variable(torch.from_numpy(low1)).to(self.device).float() pred = torch.nn.functional.softmax(self.BNET3Dnet(high, low1, pred_size=prediction_size).detach().cpu()) pred =", "t1ce_slice = np.transpose(t1c_v[:,:,slices]) t1_slice = np.transpose(t1_v[:,:,slices]) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],4)) array[:,:,0]", "0) if not self.quick: final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1, t1ce, t2, flair,", "= np.transpose(t1c_v[:,:,slices]) t1_slice = np.transpose(t1_v[:,:,slices]) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],4)) array[:,:,0] =", "low1 = np.zeros((1, 4, resize_to, resize_to, resize_to)) high[0, 0], high[0,", "mask_path +' Normalize '+ t1_path) os.system(self.ants_path +'ThresholdImage 3 '+ mask_path", "transformSequence(array) transformed_array = transformed_array.unsqueeze(0) transformed_array = transformed_array.to(self.device) outs = 
torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy()", "= t1c volume (numpy array) t2_v = t2 volume (numpy", "if save: save_volume(final_pred, affine, os.path.join(path, 'DeepBrainSeg_Prediction')) return final_pred # ========================================================================================", "os.path.join(save_path, 'DeepBrainSeg_Prediction')) return final_pred def get_segmentation_brats(self, path, save = True):", "0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 1,", "def inner_class_classification_with_logits_DualPath(self, t1, t1ce, t2, flair, brain_mask, mask=None, prediction_size =", "from torchvision import transforms from time import gmtime, strftime from", "z:z+prediction_size] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_2D(self,", "x+hr_pad) vyf, vyt = max(0, y-hl_pad), min(shape[1], y+hr_pad) vzf, vzt", ":, :] = t2[x:x+N, y:y+N, z:z+N] high[0, 2, :, :,", "3] = high[0, 0] + flair[0,0,0], high[0, 1] + t2[0,0,0],", "t1_v = t1 volume (numpy array) t1c_v = t1c volume", "array[:,:,2] = t1ce_slice array = np.uint8(array) transformed_array = transformSequence(array) transformed_array", "txt = max(0, hl_pad-x), max(0, hl_pad-x) + vxt - vxf", "x-ll_pad), min(shape[0], x+lr_pad) vyf, vyt = max(0, y-ll_pad), min(shape[1], y+lr_pad)", "reducing false positives outside the brain Air Brain Lesson model", "+ vzt - vzf low[0, 0, txf:txt, tyf:tyt, tzf:tzt] =", "scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices])) t2_slice = scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices])) t1ce_slice = scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices])) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],3))", "self.ABLnet.load_state_dict(saved_parms['state_dict']) ## fill the model with trained params print (\"===================================", 
"if not quick: # BrainNet3D model...................... from .models.model3DBNET import BrainNet_3D_Inception", "'.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar') #======================================================================================== # air brain lesion segmentation.............. from .models.modelABL import", "data in BraTs format if save True saves the prediction", "3) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1,", "+ t1ce[0,0,0] low1[0, 0], low1[0, 1], low1[0, 2], low1[0, 3]", "t1ce_slice = scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices])) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],3)) array[:,:,0] = flair_slice array[:,:,1]", "inference patch size \"\"\" t1 = normalize(t1, brain_mask) t1ce =", "x_max, y_min, y_max, z_min, z_max = x_min, min(shape[0] - N,", "hl_pad-z) + vzt - vzf high[0, 0, txf:txt, tyf:tyt, tzf:tzt]", "mask_path +' '+ mask_path +' 1 1 1') mask =", "N)) high[0, 0, :, :, :] = flair[x:x+N, y:y+N, z:z+N]", "vyt = max(0, y-hl_pad), min(shape[1], y+hr_pad) vzf, vzt = max(0,", "= nib.load(os.path.join(path, name + 't1.nii.gz')).get_data() t1ce = nib.load(os.path.join(path, name +", "\"\"\" t1 = nib.load(t1_path).get_data() t2 = nib.load(t2_path).get_data() t1ce = nib.load(t1ce_path).get_data()", "false positives outside the brain Air Brain Lesson model (2D", "network 3. MNet2D 57 layered convolutional network for inner class", "high[0, 1] + t2[0,0,0], high[0, 2] + t1[0,0,0], high[0, 2]", "min(shape[1], y+lr_pad) vzf, vzt = max(0, z-ll_pad), min(shape[2], z+lr_pad) txf,", "transforms from time import gmtime, strftime from tqdm import tqdm", "+'ThresholdImage 3 '+ mask_path +' '+ mask_path +' 0.01 1')", "convolutional network for inner class classification 4. 
Tir3Dnet 57 layered", "x_min, x_max, y_min, y_max, z_min, z_max = bbox(mask, pad =", "57 layered convolutional network for inner class classification 4. Tir3Dnet", "function saves the prediction with DeepBrainSeg_Prediction.nii.qz name in the provided", "sequence of patient data. to main platform for segmentation mask", "flair_slice = np.transpose(flair_v[:,:,slices]) t2_slice = np.transpose(t2_v[:,:,slices]) t1ce_slice = np.transpose(t1c_v[:,:,slices]) t1_slice", "flair_v = flair volume (numpy array) brain_mask = brain, whole", "ants_path = bin_path): device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")", "high = np.zeros((1, 4, high_res_size, high_res_size, high_res_size)) low = np.zeros((1,", "t1ce[0,0,0] low1[0, 0], low1[0, 1], low1[0, 2], low1[0, 3] =", "size during inference \"\"\" t1 = normalize(t1, brain_mask) t1ce =", "#======================================================================================== # Tir3D model................... from .models.modelTir3D import FCDenseNet57 self.T3Dnclasses =", "= 4 self.MNET2D = FCDenseNet57(self.Mnclasses) ckpt = torch.load(ckpt_tir2D, map_location=map_location) self.MNET2D.load_state_dict(ckpt['state_dict'])", "z_max = x_min, min(shape[0] - N, x_max), y_min, min(shape[1] -", "the patients data path returns : segmentation mask \"\"\" name", "# air brain lesion segmentation.............. 
from .models.modelABL import FCDenseNet103 self.ABLnclasses", "adjust_classes(final_pred) if save_path: os.makedirs(save_path, exist_ok=True) save_volume(final_pred, affine, os.path.join(save_path, 'DeepBrainSeg_Prediction')) return", "t2_v = normalize(t2_v, brain_mask) flair_v = normalize(flair_v, brain_mask) generated_output_logits =", "for the data not in brats format if save_path provided", "low1[0, 1] + t2[0,0,0], low1[0, 2] + t1[0,0,0], low1[0, 2]", "N) x_min, x_max, y_min, y_max, z_min, z_max = x_min, min(shape[0]", "= nib.load(os.path.join(path, name + 'flair.nii.gz')).get_data() t1 = nib.load(os.path.join(path, name +", "= normalize(t2, brain_mask) flair = normalize(flair, brain_mask) shape = t1.shape", "vzt = max(0, z-ll_pad), min(shape[2], z+lr_pad) txf, txt = max(0,", "path, save = True): \"\"\" Generates segmentation for the data", "= 64): \"\"\" output of 3D tiramisu model (tir3Dnet) mask", "(\"================================== TIRNET2D Loaded =================================\") self.Tir3Dnet.eval() self.Tir3Dnet = self.Tir3Dnet.to(device) #======================================================================================== self.device", "max(0, x-hl_pad), min(shape[0], x+hr_pad) vyf, vyt = max(0, y-hl_pad), min(shape[1],", "final_prediction def inner_class_classification_with_logits_DualPath(self, t1, t1ce, t2, flair, brain_mask, mask=None, prediction_size", "+' '+ mask_path +' 0.01 1') os.system(self.ants_path +'ImageMath 3 '+", "network for inner class classification Dual Path way network 3.", "self.Tir3Dnet.to(device) #======================================================================================== self.device = device self.quick = quick self.ants_path =", "%b %Y %H:%M:%S +0000\", gmtime()) + \") Working on: \",", "max(0, hl_pad-z), max(0, hl_pad-z) + vzt - vzf high[0, 0,", "tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 2, txf:txt, tyf:tyt, tzf:tzt]", "Generates segmentation for the data not in brats format if", "vzf:vzt] # 
========================================================================= low1[0] = [resize(low[0, i, :, :, :],", "transformed_array = transformSequence(array) transformed_array = transformed_array.unsqueeze(0) transformed_array = transformed_array.to(self.device) outs", "import resize from torchvision import transforms from time import gmtime,", "Lesson model (2D model, 103 layered) 2. BNet3Dnet 3D network", "os from ..helpers.helper import * from os.path import expanduser home", "z+hr_pad) txf, txt = max(0, hl_pad-x), max(0, hl_pad-x) + vxt", "import os from ..helpers.helper import * from os.path import expanduser", "os.system(self.ants_path +'ImageMath 3 '+ mask_path +' MD '+ mask_path +'", "slices in tqdm(range(flair_volume.shape[2])): flair_slice = scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices])) t2_slice = scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices])) t1ce_slice", "exclude batch_size final_prediction = np.zeros((self.T3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max,", "mask = self.get_localization(t1, t1ce, t2, flair, brain_mask) # mask =", "3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # =========================================================================", "# mask = np.swapaxes(mask,1, 0) if not self.quick: final_predictionTir3D_logits =", "normalize(flair, brain_mask) shape = t1.shape # to exclude batch_size final_prediction", "as sitk import numpy as np import nibabel as nib", "t1[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt,", "with torch.no_grad(): for x in tqdm(range(x_min, x_max, N//2)): for y", "= np.zeros((1, 4, low_res_size, low_res_size, low_res_size)) low1 = np.zeros((1, 4,", "t2[0,0,0], high[0, 2] + t1[0,0,0], high[0, 2] + t1ce[0,0,0] low[0,", "0.01 1') os.system(self.ants_path +'ImageMath 3 '+ mask_path +' MD '+", "3 x 240 x 240 generated_output_logits[:,:,:, slices] = logits.transpose(0, 1,", "t1ce_path, flair_path, save_path = 
None): \"\"\" Generates segmentation for the", "===================================\") self.MNET2D.eval() self.MNET2D = self.MNET2D.to(device) #======================================================================================== if not quick: #", "= max(0, hl_pad-z), max(0, hl_pad-z) + vzt - vzf high[0,", ": segmentation mask \"\"\" name = path.split(\"/\")[-1] + \"_\" flair", "1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 2,", "vzf:vzt] # ========================================================================= vxf, vxt = max(0, x-ll_pad), min(shape[0], x+lr_pad)", "ckpt = torch.load(ckpt_BNET3D, map_location=map_location) self.BNET3Dnet.load_state_dict(ckpt['state_dict']) print (\"=================================== KAMNET3D Loaded =================================\")", "x_min, x_max, y_min, y_max, z_min, z_max = x_min, min(shape[0] -", "+ vzt - vzf high[0, 0, txf:txt, tyf:tyt, tzf:tzt] =", "tiramisu model (tir3Dnet) mask = numpy array output of ABLnet", "========================================================================= low1[0] = [resize(low[0, i, :, :, :], (resize_to, resize_to,", "the prediction in the save directory in the patients data", "BNet3Dnet 3D network for inner class classification Dual Path way", "import * from os.path import expanduser home = expanduser(\"~\") #========================================================================================", "of 2D tiramisu model (MNet) \"\"\" normalize = transforms.Normalize([0.485, 0.456,", "high[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] high[0,", "x_max), y_min, min(shape[1] - N, y_max), z_min, min(shape[2] - N,", "contact: <EMAIL> import torch import SimpleITK as sitk import numpy", "(low_res_size - prediction_size)//2 lr_pad = ll_pad + prediction_size for x", "nib.load(os.path.join(path, name + 't2.nii.gz')).get_data() affine= nib.load(os.path.join(path, name + 'flair.nii.gz')).affine print", "import nibabel as nib from torch.autograd import 
Variable from skimage.transform", "final_pred = combine_mask_prediction(mask, final_pred) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred)", "+ t2[0,0,0], low[0, 2] + t1[0,0,0], low[0, 2] + t1ce[0,0,0]", "ME '+ mask_path +' 1') os.system(self.ants_path +'CopyImageHeaderInformation '+ t1_path+' '+", "= t1 volume (numpy array) t1c_v = t1c volume (numpy", "in range(z_min, z_max, N//2): high = np.zeros((1, 4, N, N,", ":, :] = flair[x:x+N, y:y+N, z:z+N] high[0, 1, :, :,", "os.path.join('/opt/ANTs/bin/') class tumorSeg(): \"\"\" class performs segmentation for a given", ":, :], (resize_to, resize_to, resize_to)) for i in range(4)] high", "combine_mask_prediction(mask, final_pred) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save:", "= self.get_localization(t1, t1ce, t2, flair, brain_mask) mask = np.swapaxes(mask,1, 0)", "final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair).transpose(0, 2, 1, 3) final_prediction_array =", "prediction_size = mid inference patch size \"\"\" t1 = normalize(t1,", "uint8 type) \"\"\" mask_path = os.path.join(os.path.dirname(t1_path), 'mask.nii.gz') os.system(self.ants_path +'ImageMath 3", "of ABLnet N = patch size during inference \"\"\" t1", "= max(0, ll_pad-y), max(0, ll_pad-y) + vyt - vyf tzf,", "brain lesion segmentation.............. 
from .models.modelABL import FCDenseNet103 self.ABLnclasses = 3", "mask = np.uint8(nib.load(mask_path).get_data()) return mask def get_localization(self, t1_v, t1c_v, t2_v,", "= adjust_classes_air_brain_tumour(np.uint8(final_pred)) return np.uint8(final_pred) def inner_class_classification_with_logits_NCube(self, t1, t1ce, t2, flair,", "= pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_2D(self, t1ce_volume,", "BrainNet_3D_Inception self.B3Dnclasses = 5 self.BNET3Dnet = BrainNet_3D_Inception() ckpt = torch.load(ckpt_BNET3D,", "<EMAIL> import torch import SimpleITK as sitk import numpy as", "directory in the patients data path returns : segmentation mask", "= adjust_classes(final_pred) if save_path: os.makedirs(save_path, exist_ok=True) save_volume(final_pred, affine, os.path.join(save_path, 'DeepBrainSeg_Prediction'))", "brain_mask = get_brain_mask(t1) mask = self.get_localization(t1, t1ce, t2, flair, brain_mask)", "from ..helpers.helper import * from os.path import expanduser home =", "+ 'flair.nii.gz')).get_data() t1 = nib.load(os.path.join(path, name + 't1.nii.gz')).get_data() t1ce =", "high[0, 0, :, :, :] = flair[x:x+N, y:y+N, z:z+N] high[0,", "perform_postprocessing(final_pred) final_pred = adjust_classes_air_brain_tumour(np.uint8(final_pred)) return np.uint8(final_pred) def inner_class_classification_with_logits_NCube(self, t1, t1ce,", "self.ABLnet.to(device) #======================================================================================== # Tir2D net....................... 
from .models.modelTir2D import FCDenseNet57 self.Mnclasses", "self.get_localization(t1, t1ce, t2, flair, brain_mask) mask = np.swapaxes(mask,1, 0) if", "array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice array = np.uint8(array) transformed_array", "= postprocessing_pydensecrf(final_prediction_logits) final_pred = combine_mask_prediction(mask, final_pred) final_pred = perform_postprocessing(final_pred) final_pred", "of segmentation mask 1. ABLnet for reducing false positives outside", "x_max, N//2)): for y in range(y_min, y_max, N//2): for z", "'DeepBrainSeg_Prediction')) return final_pred def get_segmentation_brats(self, path, save = True): \"\"\"", "flair_path, save_path = None): \"\"\" Generates segmentation for the data", "pad = prediction_size) # obtained by aspect ratio calculation high_res_size", "final_pred # ======================================================================================== if __name__ == '__main__': ext = deepSeg(True)", "+' MD '+ mask_path +' 1') os.system(self.ants_path +'ImageMath 3 '+", "torchvision import transforms from time import gmtime, strftime from tqdm", "range(y_min, y_max, N//2): for z in range(z_min, z_max, N//2): high", "1] + t2[0,0,0], high[0, 2] + t1[0,0,0], high[0, 2] +", "\"\"\" def __init__(self, quick = False, ants_path = bin_path): device", "0, :, :, :] = flair[x:x+N, y:y+N, z:z+N] high[0, 1,", "+ t1ce[0,0,0] low[0, 0], low[0, 1], low[0, 2], low[0, 3]", "= max(0, ll_pad-z), max(0, ll_pad-z) + vzt - vzf low[0,", "vzf high[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt]", "= t2_slice array[:,:,2] = t1ce_slice array = np.uint8(array) transformed_array =", "numpy array output of ABLnet N = patch size during", "- prediction_size, prediction_size)): for z in (range(z_min, z_max - prediction_size,", "print (\"=================================== MNET2D Loaded ===================================\") self.MNET2D.eval() self.MNET2D = self.MNET2D.to(device) 
#========================================================================================", "final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save_path: os.makedirs(save_path, exist_ok=True)", "stripping t1_path: t1 volume path (str) saves the mask in", "- N, x_max), y_min, min(shape[1] - N, y_max), z_min, min(shape[2]", "os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar') #======================================================================================== # air brain lesion segmentation.............. from .models.modelABL", "= apply_argmax_to_logits(generated_output_logits) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes_air_brain_tumour(np.uint8(final_pred)) return np.uint8(final_pred)", "= nib.load(flair_path).get_data() affine = nib.load(flair_path).affine brain_mask = self.get_ants_mask(t2_path) mask =", "MD '+ mask_path +' 1') os.system(self.ants_path +'ImageMath 3 '+ mask_path", "save True saves the prediction in the save directory in", "final_prediction[:, x:x+prediction_size, y:y+prediction_size, z:z+prediction_size] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return", "flair_slice = scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices])) t2_slice = scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices])) t1ce_slice = scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices])) array", "t1c_v = normalize(t1c_v, brain_mask) t2_v = normalize(t2_v, brain_mask) flair_v =", "+ t2[0,0,0], high[0, 2] + t1[0,0,0], high[0, 2] + t1ce[0,0,0]", "in range(4)] high = Variable(torch.from_numpy(high)).to(self.device).float() low1 = Variable(torch.from_numpy(low1)).to(self.device).float() pred =", "provided directory returns: segmentation mask \"\"\" t1 = nib.load(t1_path).get_data() t2", "vyf:vyt, vzf:vzt] low[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt,", "= 
Variable(torch.from_numpy(low1)).to(self.device).float() pred = torch.nn.functional.softmax(self.BNET3Dnet(high, low1, pred_size=prediction_size).detach().cpu()) pred = pred.numpy()", "flair_volume): \"\"\" output of 2D tiramisu model (MNet) \"\"\" normalize", "with DeepBrainSeg_Prediction.nii.qz name in the provided directory returns: segmentation mask", "outs = torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy() outs = np.swapaxes(generated_output,1, 2) return outs def", "txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 1, txf:txt,", "mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair).transpose(0, 2, 1, 3) final_prediction_array", "brain_mask) t1c_v = normalize(t1c_v, brain_mask) t2_v = normalize(t2_v, brain_mask) flair_v", "np.zeros((self.B3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max, y_min, y_max, z_min, z_max", "path) brain_mask = self.get_ants_mask(os.path.join(path, name + 't2.nii.gz')) # brain_mask =", "=================================\") self.Tir3Dnet.eval() self.Tir3Dnet = self.Tir3Dnet.to(device) #======================================================================================== self.device = device self.quick", "name in the provided directory returns: segmentation mask \"\"\" t1", "patch size \"\"\" t1 = normalize(t1, brain_mask) t1ce = normalize(t1ce,", "for the data in BraTs format if save True saves", "/usr/bin/env python # -*- coding: utf-8 -*- # # author:", "low_res_size)) low1 = np.zeros((1, 4, resize_to, resize_to, resize_to)) high[0, 0],", "random format step followed for in estimation of segmentation mask", "get_brain_mask(t1) mask = self.get_localization(t1, t1ce, t2, flair, brain_mask) mask =", "pad = N) x_min, x_max, y_min, y_max, z_min, z_max =", "self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionMnet_logits]) final_prediction_logits = 
combine_logits_AM(final_prediction_array) final_pred", "segmentation mask \"\"\" t1 = nib.load(t1_path).get_data() t2 = nib.load(t2_path).get_data() t1ce", "else \"cpu\") # device = \"cpu\" map_location = device #========================================================================================", "model................... from .models.modelTir3D import FCDenseNet57 self.T3Dnclasses = 5 self.Tir3Dnet =", "for i in range(4)] high = Variable(torch.from_numpy(high)).to(self.device).float() low1 = Variable(torch.from_numpy(low1)).to(self.device).float()", "layered convolutional network for inner class classification 4. Tir3Dnet 57", "#======================================================================================== self.device = device self.quick = quick self.ants_path = ants_path", ".models.model3DBNET import BrainNet_3D_Inception self.B3Dnclasses = 5 self.BNET3Dnet = BrainNet_3D_Inception() ckpt", "prediction_size) # obtained by aspect ratio calculation high_res_size = prediction_size", "- prediction_size)//2 hr_pad = hl_pad + prediction_size ll_pad = (low_res_size", "= np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair)", "platform for segmentation mask estimation one for the patient data", "tyt = max(0, ll_pad-y), max(0, ll_pad-y) + vyt - vyf", "= low1[0, 0] + flair[0,0,0], low1[0, 1] + t2[0,0,0], low1[0,", "mask = self.get_localization(t1, t1ce, t2, flair, brain_mask) mask = np.swapaxes(mask,1,", "generated_output_logits[:,:,:, slices] = logits.transpose(0, 1, 3, 2) final_pred = apply_argmax_to_logits(generated_output_logits)", "= prediction_size + 16 resize_to = int(prediction_size ** 0.5) +", "brain_mask) generated_output_logits = np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2])) for slices in tqdm(range(flair_v.shape[2])): flair_slice", "# to exclude batch_size 
final_prediction = np.zeros((self.B3Dnclasses, shape[0], shape[1], shape[2]))", "intialize the graph saved_parms=torch.load(ckpt_ABL, map_location=map_location) self.ABLnet.load_state_dict(saved_parms['state_dict']) ## fill the model", "brain_mask, mask, N = 64): \"\"\" output of 3D tiramisu", "= os.path.join('/opt/ANTs/bin/') class tumorSeg(): \"\"\" class performs segmentation for a", "t1[0,0,0], low[0, 2] + t1ce[0,0,0] low1[0, 0], low1[0, 1], low1[0,", "vyf:vyt, vzf:vzt] high[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt,", "FCDenseNet57 self.Mnclasses = 4 self.MNET2D = FCDenseNet57(self.Mnclasses) ckpt = torch.load(ckpt_tir2D,", "ABLNET2D Loaded =================================\") self.ABLnet.eval() self.ABLnet = self.ABLnet.to(device) #======================================================================================== # Tir2D", "t2 = nib.load(t2_path).get_data() t1ce = nib.load(t1ce_path).get_data() flair = nib.load(flair_path).get_data() affine", "tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 1, txf:txt, tyf:tyt, tzf:tzt]", "Loaded =================================\") self.ABLnet.eval() self.ABLnet = self.ABLnet.to(device) #======================================================================================== # Tir2D net.......................", "flair[0,0,0], low1[0, 1] + t2[0,0,0], low1[0, 2] + t1[0,0,0], low1[0,", "ckpt_tir2D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar') ckpt_tir3D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar') ckpt_BNET3D =", "(BNet3D) else copmutes an ensumble over all four networks \"\"\"", "= max(0, z-hl_pad), min(shape[2], z+hr_pad) txf, txt = max(0, hl_pad-x),", "2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 3,", "'+ mask_path +' Normalize '+ t1_path) os.system(self.ants_path +'ThresholdImage 3 '+", "estimation one for the patient data in brats format other", "t2 = nib.load(os.path.join(path, name + 
't2.nii.gz')).get_data() affine= nib.load(os.path.join(path, name +", "flair, brain_mask, mask=None, prediction_size = 9): \"\"\" output of BNet3D", "exclude batch_size final_prediction = np.zeros((self.B3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max,", "N//2): for z in range(z_min, z_max, N//2): high = np.zeros((1,", "prediction_size)): high = np.zeros((1, 4, high_res_size, high_res_size, high_res_size)) low =", "np.uint8(nib.load(mask_path).get_data()) return mask def get_localization(self, t1_v, t1c_v, t2_v, flair_v, brain_mask):", "True (just evaluates on Dual path network (BNet3D) else copmutes", "self.Tir3Dnet = FCDenseNet57(self.T3Dnclasses) ckpt = torch.load(ckpt_tir3D, map_location=map_location) self.Tir3Dnet.load_state_dict(ckpt['state_dict']) print (\"==================================", "pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_DualPath(self, t1, t1ce,", "high[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] high[0,", "t1ce[0,0,0] low[0, 0], low[0, 1], low[0, 2], low[0, 3] =", "low_res_size, low_res_size)) low1 = np.zeros((1, 4, resize_to, resize_to, resize_to)) high[0,", "= normalize(flair, brain_mask) shape = t1.shape # to exclude batch_size", "t1ce_slice array = np.uint8(array) transformed_array = transformSequence(array) transformed_array = transformed_array.unsqueeze(0)", "high[0, 3] = high[0, 0] + flair[0,0,0], high[0, 1] +", "import FCDenseNet57 self.T3Dnclasses = 5 self.Tir3Dnet = FCDenseNet57(self.T3Dnclasses) ckpt =", "= torch.load(ckpt_tir3D, map_location=map_location) self.Tir3Dnet.load_state_dict(ckpt['state_dict']) print (\"================================== TIRNET2D Loaded =================================\") self.Tir3Dnet.eval()", "if batch size == 1 transformed_array = transformed_array.to(self.device) logits =", "= int(prediction_size ** 0.5) + 16 low_res_size = int(51*resize_to/19) hl_pad", 
"Variable(torch.from_numpy(high)).to(self.device).float() pred = torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu()) pred = pred.data.numpy() final_prediction[:, x:x+N, y:y+N,", "in tqdm(range(x_min, x_max - prediction_size, prediction_size)): for y in (range(y_min,", "np.zeros((flair_slice.shape[0],flair_slice.shape[1],4)) array[:,:,0] = flair_slice array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice", "convolutional network for inner class classification more on training details", "+ vyt - vyf tzf, tzt = max(0, hl_pad-z), max(0,", "min(shape[2], z+hr_pad) txf, txt = max(0, hl_pad-x), max(0, hl_pad-x) +", "combine_mask_prediction(mask, final_pred) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save_path:", "tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 3, txf:txt, tyf:tyt, tzf:tzt]", "ants_path def get_ants_mask(self, t1_path): \"\"\" We make use of ants", "trained params print (\"=================================== ABLNET2D Loaded =================================\") self.ABLnet.eval() self.ABLnet =", "inner_class_classification_with_logits_NCube(self, t1, t1ce, t2, flair, brain_mask, mask, N = 64):", "high_res_size)) low = np.zeros((1, 4, low_res_size, low_res_size, low_res_size)) low1 =", "the model with trained params print (\"=================================== ABLNET2D Loaded =================================\")", "low1[0, 2] + t1ce[0,0,0] # ========================================================================= vxf, vxt = max(0,", "N//2)): for y in range(y_min, y_max, N//2): for z in", "bbox(mask, pad = prediction_size) # obtained by aspect ratio calculation", "flair[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt,", "t1, t1ce, t2, flair, brain_mask, mask, N = 64): \"\"\"", "all four networks \"\"\" def __init__(self, quick = False, ants_path", "(numpy array) t1c_v = t1c volume (numpy array) t2_v =", "array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice array[:,:,3] = 
t1_slice transformed_array", "nib.load(os.path.join(path, name + 't1ce.nii.gz')).get_data() t2 = nib.load(os.path.join(path, name + 't2.nii.gz')).get_data()", "N, y_max), z_min, min(shape[2] - N, z_max) with torch.no_grad(): for", "= Variable(torch.from_numpy(high)).to(self.device).float() pred = torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu()) pred = pred.data.numpy() final_prediction[:, x:x+N,", "mask_path +' 1 1 1') mask = np.uint8(nib.load(mask_path).get_data()) return mask", "class tumorSeg(): \"\"\" class performs segmentation for a given sequence", "np.transpose(t1c_v[:,:,slices]) t1_slice = np.transpose(t1_v[:,:,slices]) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],4)) array[:,:,0] = flair_slice", "mask_path +' '+ mask_path +' 0.01 1') os.system(self.ants_path +'ImageMath 3", "t1[0,0,0], low1[0, 2] + t1ce[0,0,0] # ========================================================================= vxf, vxt =", "vyf:vyt, vzf:vzt] high[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt,", "prediction_size, prediction_size)): high = np.zeros((1, 4, high_res_size, high_res_size, high_res_size)) low", "vxt - vxf tyf, tyt = max(0, ll_pad-y), max(0, ll_pad-y)", "t1[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt,", "# # author: <NAME> # contact: <EMAIL> import torch import", "size == 1 transformed_array = transformed_array.to(self.device) logits = self.ABLnet(transformed_array).detach().cpu().numpy()# 3", "quick = False, ants_path = bin_path): device = torch.device(\"cuda:0\" if", "= nib.load(t1_path).get_data() t2 = nib.load(t2_path).get_data() t1ce = nib.load(t1ce_path).get_data() flair =", "= max(0, hl_pad-x), max(0, hl_pad-x) + vxt - vxf tyf,", "========================= quick: True (just evaluates on Dual path network (BNet3D)", "brats format other with any random format step followed for", "= BrainNet_3D_Inception() ckpt = torch.load(ckpt_BNET3D, map_location=map_location) 
self.BNET3Dnet.load_state_dict(ckpt['state_dict']) print (\"=================================== KAMNET3D", "transformSequence=transforms.Compose(transformList) generated_output = np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2])) for slices in tqdm(range(flair_volume.shape[2])): flair_slice =", "[0.229, 0.224, 0.225]) transformList = [] transformList.append(transforms.ToTensor()) transformList.append(normalize) transformSequence=transforms.Compose(transformList) generated_output", "0] + flair[0,0,0], low[0, 1] + t2[0,0,0], low[0, 2] +", "map_location=map_location) self.BNET3Dnet.load_state_dict(ckpt['state_dict']) print (\"=================================== KAMNET3D Loaded =================================\") self.BNET3Dnet.eval() self.BNET3Dnet =", "tqdm(range(x_min, x_max - prediction_size, prediction_size)): for y in (range(y_min, y_max", "data not in brats format if save_path provided function saves", "low1[0, 3] = low1[0, 0] + flair[0,0,0], low1[0, 1] +", "low[0, 3] = low[0, 0] + flair[0,0,0], low[0, 1] +", "torch.cuda.is_available() else \"cpu\") # device = \"cpu\" map_location = device", "= max(0, hl_pad-y), max(0, hl_pad-y) + vyt - vyf tzf,", "low1[0, 2], low1[0, 3] = low1[0, 0] + flair[0,0,0], low1[0,", "for slices in tqdm(range(flair_v.shape[2])): flair_slice = np.transpose(flair_v[:,:,slices]) t2_slice = np.transpose(t2_v[:,:,slices])", "self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits", "tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 2, txf:txt, tyf:tyt,", "BNet3D prediction_size = mid inference patch size \"\"\" t1 =", "0.224, 0.225]) transformList = [] transformList.append(transforms.ToTensor()) transformList.append(normalize) transformSequence=transforms.Compose(transformList) generated_output =", "tqdm(range(x_min, x_max, 
N//2)): for y in range(y_min, y_max, N//2): for", "y+lr_pad) vzf, vzt = max(0, z-ll_pad), min(shape[2], z+lr_pad) txf, txt", "+' 1') os.system(self.ants_path +'CopyImageHeaderInformation '+ t1_path+' '+ mask_path +' '+", "= combine_mask_prediction(mask, final_pred) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if", "positives outside the brain Air Brain Lesson model (2D model,", "os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar') ckpt_ABL = os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar') #======================================================================================== # air brain", "= pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_DualPath(self, t1,", "pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_2D(self, t1ce_volume, t2_volume,", "saves the prediction with DeepBrainSeg_Prediction.nii.qz name in the provided directory", "final_prediction[:, x:x+N, y:y+N, z:z+N] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return", "final_pred) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save_path: os.makedirs(save_path,", "3. MNet2D 57 layered convolutional network for inner class classification", "16 low_res_size = int(51*resize_to/19) hl_pad = (high_res_size - prediction_size)//2 hr_pad", "t1_path): \"\"\" We make use of ants framework for generalized", ":, :, :] = t1ce[x:x+N, y:y+N, z:z+N] high = Variable(torch.from_numpy(high)).to(self.device).float()", "expanduser home = expanduser(\"~\") #======================================================================================== # prediction functions..................... 
bin_path =", "import FCDenseNet103 self.ABLnclasses = 3 self.ABLnet = FCDenseNet103(n_classes = self.ABLnclasses)", "low_res_size = int(51*resize_to/19) hl_pad = (high_res_size - prediction_size)//2 hr_pad =", "high[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt] #", "adjust_classes_air_brain_tumour(np.uint8(final_pred)) return np.uint8(final_pred) def inner_class_classification_with_logits_NCube(self, t1, t1ce, t2, flair, brain_mask,", "performs segmentation for a given sequence of patient data. to", "for slices in tqdm(range(flair_volume.shape[2])): flair_slice = scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices])) t2_slice = scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices]))", "final_pred = adjust_classes_air_brain_tumour(np.uint8(final_pred)) return np.uint8(final_pred) def inner_class_classification_with_logits_NCube(self, t1, t1ce, t2,", "prediction with DeepBrainSeg_Prediction.nii.qz name in the provided directory returns: segmentation", "#======================================================================================== ckpt_tir2D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar') ckpt_tir3D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar') ckpt_BNET3D", "not quick: # BrainNet3D model...................... 
from .models.model3DBNET import BrainNet_3D_Inception self.B3Dnclasses", "\"\"\" name = path.split(\"/\")[-1] + \"_\" flair = nib.load(os.path.join(path, name", "+' 0.01 1') os.system(self.ants_path +'ImageMath 3 '+ mask_path +' MD", "mask=None, prediction_size = 9): \"\"\" output of BNet3D prediction_size =", "y_max, N//2): for z in range(z_min, z_max, N//2): high =", "details and network information: (https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>) ========================= quick: True (just evaluates", "torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") # device = \"cpu\" map_location", "transformed_array = torch.from_numpy(convert_image(array)).float() transformed_array = transformed_array.unsqueeze(0) ## neccessary if batch", "the same location as t1 data directory returns: maskvolume (numpy", "array[:,:,2] = t1ce_slice array[:,:,3] = t1_slice transformed_array = torch.from_numpy(convert_image(array)).float() transformed_array", "# device = \"cpu\" map_location = device #======================================================================================== ckpt_tir2D =", "= t1ce[x:x+N, y:y+N, z:z+N] high = Variable(torch.from_numpy(high)).to(self.device).float() pred = torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu())", "y_min, y_max, z_min, z_max = bbox(mask, pad = N) x_min,", "txt = max(0, ll_pad-x), max(0, ll_pad-x) + vxt - vxf", "save_path provided function saves the prediction with DeepBrainSeg_Prediction.nii.qz name in", "segmentation mask estimation one for the patient data in brats", "y in range(y_min, y_max, N//2): for z in range(z_min, z_max,", "flair = nib.load(os.path.join(path, name + 'flair.nii.gz')).get_data() t1 = nib.load(os.path.join(path, name", "= self.inner_class_classification_with_logits_NCube(t1, t1ce, t2, flair, brain_mask, mask) final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1,", "brain_mask) # mask = np.swapaxes(mask,1, 0) if not self.quick: 
final_predictionTir3D_logits", "the prediction with DeepBrainSeg_Prediction.nii.qz name in the provided directory returns:", "final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1, t1ce, t2, flair, brain_mask, mask) final_predictionBNET3D_logits =", "tqdm import tqdm import pdb import os from ..helpers.helper import", "os.path.join(os.path.dirname(t1_path), 'mask.nii.gz') os.system(self.ants_path +'ImageMath 3 '+ mask_path +' Normalize '+", "not in brats format if save_path provided function saves the", "from .models.modelABL import FCDenseNet103 self.ABLnclasses = 3 self.ABLnet = FCDenseNet103(n_classes", "= torch.load(ckpt_tir2D, map_location=map_location) self.MNET2D.load_state_dict(ckpt['state_dict']) print (\"=================================== MNET2D Loaded ===================================\") self.MNET2D.eval()", "2] + t1ce[0,0,0] low1[0, 0], low1[0, 1], low1[0, 2], low1[0,", "the brain Air Brain Lesson model (2D model, 103 layered)", "= t2_slice array[:,:,2] = t1ce_slice array[:,:,3] = t1_slice transformed_array =", "t1_path, t2_path, t1ce_path, flair_path, save_path = None): \"\"\" Generates segmentation", "= high[0, 0] + flair[0,0,0], high[0, 1] + t2[0,0,0], high[0,", "mask estimation one for the patient data in brats format", "output of BNet3D prediction_size = mid inference patch size \"\"\"", "+' 1') os.system(self.ants_path +'ImageMath 3 '+ mask_path +' ME '+", "x:x+prediction_size, y:y+prediction_size, z:z+prediction_size] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction", "low[0, 0], low[0, 1], low[0, 2], low[0, 3] = low[0,", "x 240 generated_output_logits[:,:,:, slices] = logits.transpose(0, 1, 3, 2) final_pred", "any random format step followed for in estimation of segmentation", "= scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices])) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],3)) array[:,:,0] = flair_slice array[:,:,1] =", 
"(MNet) \"\"\" normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])", "t2, flair).transpose(0, 2, 1, 3) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits])", "= transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) transformList = []", "class classification Dual Path way network 3. MNet2D 57 layered", "= np.uint8(array) transformed_array = transformSequence(array) transformed_array = transformed_array.unsqueeze(0) transformed_array =", "framework for generalized skull stripping t1_path: t1 volume path (str)", "# Tir2D net....................... from .models.modelTir2D import FCDenseNet57 self.Mnclasses = 4", "We make use of ants framework for generalized skull stripping", "directory returns: segmentation mask \"\"\" t1 = nib.load(t1_path).get_data() t2 =", "1, 3, 2) final_pred = apply_argmax_to_logits(generated_output_logits) final_pred = perform_postprocessing(final_pred) final_pred", "device self.quick = quick self.ants_path = ants_path def get_ants_mask(self, t1_path):", "in the same location as t1 data directory returns: maskvolume", "3 '+ mask_path +' Normalize '+ t1_path) os.system(self.ants_path +'ThresholdImage 3", "ll_pad-y), max(0, ll_pad-y) + vyt - vyf tzf, tzt =", "max(0, hl_pad-x) + vxt - vxf tyf, tyt = max(0,", "\"\"\" class performs segmentation for a given sequence of patient", "functions..................... 
bin_path = os.path.join('/opt/ANTs/bin/') class tumorSeg(): \"\"\" class performs segmentation", "y:y+prediction_size, z:z+prediction_size] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def", "final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_DualPath(self, t1, t1ce, t2,", "= hl_pad + prediction_size ll_pad = (low_res_size - prediction_size)//2 lr_pad", "= normalize(t1ce, brain_mask) t2 = normalize(t2, brain_mask) flair = normalize(flair,", "\"\"\" Generates segmentation for the data in BraTs format if", "inference \"\"\" t1 = normalize(t1, brain_mask) t1ce = normalize(t1ce, brain_mask)", "normalize(t1ce, brain_mask) t2 = normalize(t2, brain_mask) flair = normalize(flair, brain_mask)", "low[0, 0] + flair[0,0,0], low[0, 1] + t2[0,0,0], low[0, 2]", "pred = pred.data.numpy() final_prediction[:, x:x+N, y:y+N, z:z+N] = pred[0] final_prediction", "nib.load(os.path.join(path, name + 'flair.nii.gz')).get_data() t1 = nib.load(os.path.join(path, name + 't1.nii.gz')).get_data()", ":, :, :] = t2[x:x+N, y:y+N, z:z+N] high[0, 2, :,", "= max(0, y-hl_pad), min(shape[1], y+hr_pad) vzf, vzt = max(0, z-hl_pad),", "t2, flair, brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array", "return mask def get_localization(self, t1_v, t1c_v, t2_v, flair_v, brain_mask): \"\"\"", "'.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar') ckpt_ABL = os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar') #======================================================================================== # air brain lesion", "= logits.transpose(0, 1, 3, 2) final_pred = apply_argmax_to_logits(generated_output_logits) final_pred =", "self.ABLnet = FCDenseNet103(n_classes = self.ABLnclasses) ## intialize the graph saved_parms=torch.load(ckpt_ABL,", 
"(\"=================================== KAMNET3D Loaded =================================\") self.BNET3Dnet.eval() self.BNET3Dnet = self.BNET3Dnet.to(device) #======================================================================================== #", "saved_parms=torch.load(ckpt_ABL, map_location=map_location) self.ABLnet.load_state_dict(saved_parms['state_dict']) ## fill the model with trained params", "low[0, 2], low[0, 3] = low[0, 0] + flair[0,0,0], low[0,", "inner class classification 4. Tir3Dnet 57 layered 3D convolutional network", "normalize(t2_v, brain_mask) flair_v = normalize(flair_v, brain_mask) generated_output_logits = np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2]))", "'DeepBrainSeg_Prediction')) return final_pred # ======================================================================================== if __name__ == '__main__': ext", "get_segmentation_brats(self, path, save = True): \"\"\" Generates segmentation for the", "range(z_min, z_max, N//2): high = np.zeros((1, 4, N, N, N))", "y:y+N, z:z+N] high[0, 1, :, :, :] = t2[x:x+N, y:y+N,", "t2_slice array[:,:,2] = t1ce_slice array = np.uint8(array) transformed_array = transformSequence(array)", "========================================================================= vxf, vxt = max(0, x-ll_pad), min(shape[0], x+lr_pad) vyf, vyt", "x in tqdm(range(x_min, x_max, N//2)): for y in range(y_min, y_max,", "apply_argmax_to_logits(generated_output_logits) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes_air_brain_tumour(np.uint8(final_pred)) return np.uint8(final_pred) def", "os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar') ckpt_BNET3D = os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar') ckpt_ABL = os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar')", "map_location = device #======================================================================================== 
ckpt_tir2D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar') ckpt_tir3D =", "t1[x:x+N, y:y+N, z:z+N] high[0, 3, :, :, :] = t1ce[x:x+N,", "** 0.5) + 16 low_res_size = int(51*resize_to/19) hl_pad = (high_res_size", "= t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= vxf, vxt = max(0,", "= pred.data.numpy() final_prediction[:, x:x+N, y:y+N, z:z+N] = pred[0] final_prediction =", "= patch size during inference \"\"\" t1 = normalize(t1, brain_mask)", ".models.modelTir3D import FCDenseNet57 self.T3Dnclasses = 5 self.Tir3Dnet = FCDenseNet57(self.T3Dnclasses) ckpt", "output of ANTs pieline) \"\"\" t1_v = normalize(t1_v, brain_mask) t1c_v", "from os.path import expanduser home = expanduser(\"~\") #======================================================================================== # prediction", "= flair_slice array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice array[:,:,3] =", "final_prediction = np.zeros((self.B3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max, y_min, y_max,", "3, :, :, :] = t1ce[x:x+N, y:y+N, z:z+N] high =", "torch.no_grad(): for x in tqdm(range(x_min, x_max, N//2)): for y in", "self.BNET3Dnet = BrainNet_3D_Inception() ckpt = torch.load(ckpt_BNET3D, map_location=map_location) self.BNET3Dnet.load_state_dict(ckpt['state_dict']) print (\"===================================", "int(prediction_size ** 0.5) + 16 low_res_size = int(51*resize_to/19) hl_pad =", "max(0, ll_pad-x), max(0, ll_pad-x) + vxt - vxf tyf, tyt", "high[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] high[0,", "high = Variable(torch.from_numpy(high)).to(self.device).float() pred = torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu()) pred = pred.data.numpy() final_prediction[:,", "calculation high_res_size = prediction_size + 16 resize_to = int(prediction_size **", "def __init__(self, quick = False, ants_path = bin_path): device =", "flair[0,0,0], low[0, 1] + 
t2[0,0,0], low[0, 2] + t1[0,0,0], low[0,", "t1ce_volume, t2_volume, flair_volume): \"\"\" output of 2D tiramisu model (MNet)", "\", path) brain_mask = self.get_ants_mask(os.path.join(path, name + 't2.nii.gz')) # brain_mask", "= torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") # device = \"cpu\"", "= flair volume (numpy array) brain_mask = brain, whole tumor", "- vxf tyf, tyt = max(0, ll_pad-y), max(0, ll_pad-y) +", "vzt - vzf high[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt,", "# author: <NAME> # contact: <EMAIL> import torch import SimpleITK", "txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= vxf,", "array) brain_mask = brain, whole tumor mask (numpy array, output", "\"\"\" output of 3D tiramisu model (tir3Dnet) mask = numpy", "def get_localization(self, t1_v, t1c_v, t2_v, flair_v, brain_mask): \"\"\" ABLnetwork output,", "z_min, min(shape[2] - N, z_max) with torch.no_grad(): for x in", "save = True): \"\"\" Generates segmentation for the data in", "1] + t2[0,0,0], low[0, 2] + t1[0,0,0], low[0, 2] +", "array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],3)) array[:,:,0] = flair_slice array[:,:,1] = t2_slice array[:,:,2]", "= prediction_size) # obtained by aspect ratio calculation high_res_size =", "#! /usr/bin/env python # -*- coding: utf-8 -*- # #", "max(0, ll_pad-x) + vxt - vxf tyf, tyt = max(0,", "3] = low1[0, 0] + flair[0,0,0], low1[0, 1] + t2[0,0,0],", "'+ mask_path +' 0.01 1') os.system(self.ants_path +'ImageMath 3 '+ mask_path", "of BNet3D prediction_size = mid inference patch size \"\"\" t1", "BraTs format if save True saves the prediction in the", "t1ce = normalize(t1ce, brain_mask) t2 = normalize(t2, brain_mask) flair =", "self.MNET2D.eval() self.MNET2D = self.MNET2D.to(device) #======================================================================================== if not quick: # BrainNet3D", "patient data. 
to main platform for segmentation mask estimation one", "save: save_volume(final_pred, affine, os.path.join(path, 'DeepBrainSeg_Prediction')) return final_pred # ======================================================================================== if", "followed for in estimation of segmentation mask 1. ABLnet for", "mask = numpy array output of ABLnet N = patch", "volume (numpy array) t2_v = t2 volume (numpy array) flair_v", "2], low[0, 3] = low[0, 0] + flair[0,0,0], low[0, 1]", "tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= vxf, vxt =", "5 self.BNET3Dnet = BrainNet_3D_Inception() ckpt = torch.load(ckpt_BNET3D, map_location=map_location) self.BNET3Dnet.load_state_dict(ckpt['state_dict']) print", "= self.BNET3Dnet.to(device) #======================================================================================== # Tir3D model................... from .models.modelTir3D import FCDenseNet57", "t2, flair, brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair).transpose(0, 2,", "+ flair[0,0,0], high[0, 1] + t2[0,0,0], high[0, 2] + t1[0,0,0],", "normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) transformList =", "outside the brain Air Brain Lesson model (2D model, 103", "for inner class classification Dual Path way network 3. MNet2D", "network for inner class classification 4. 
Tir3Dnet 57 layered 3D", "= t2 volume (numpy array) flair_v = flair volume (numpy", "self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2, flair, brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2,", "sitk import numpy as np import nibabel as nib from", "batch size == 1 transformed_array = transformed_array.to(self.device) logits = self.ABLnet(transformed_array).detach().cpu().numpy()#", "network for inner class classification more on training details and", "self.Tir3Dnet.load_state_dict(ckpt['state_dict']) print (\"================================== TIRNET2D Loaded =================================\") self.Tir3Dnet.eval() self.Tir3Dnet = self.Tir3Dnet.to(device)", "perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save: save_volume(final_pred, affine, os.path.join(path, 'DeepBrainSeg_Prediction'))", "+ t2[0,0,0], low1[0, 2] + t1[0,0,0], low1[0, 2] + t1ce[0,0,0]", "array output of ABLnet N = patch size during inference", "2] + t1ce[0,0,0] # ========================================================================= vxf, vxt = max(0, x-hl_pad),", "txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 1, txf:txt,", "= N) x_min, x_max, y_min, y_max, z_min, z_max = x_min,", "\") Working on: \", path) brain_mask = self.get_ants_mask(os.path.join(path, name +", "2] + t1[0,0,0], high[0, 2] + t1ce[0,0,0] low[0, 0], low[0,", "for x in tqdm(range(x_min, x_max, N//2)): for y in range(y_min,", "brain_mask) t2 = normalize(t2, brain_mask) flair = normalize(flair, brain_mask) shape", "make use of ants framework for generalized skull stripping t1_path:", "final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_2D(self, t1ce_volume, t2_volume, flair_volume):", "numpy as np import nibabel as nib from torch.autograd import", "min(shape[1] - N, y_max), z_min, min(shape[2] - N, z_max) with", "max(0, 
y-ll_pad), min(shape[1], y+lr_pad) vzf, vzt = max(0, z-ll_pad), min(shape[2],", "location as t1 data directory returns: maskvolume (numpy uint8 type)", "convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_2D(self, t1ce_volume, t2_volume, flair_volume): \"\"\" output", "x_max, y_min, y_max, z_min, z_max = bbox(mask, pad = prediction_size)", "= t1_slice transformed_array = torch.from_numpy(convert_image(array)).float() transformed_array = transformed_array.unsqueeze(0) ## neccessary", "vyf:vyt, vzf:vzt] # ========================================================================= low1[0] = [resize(low[0, i, :, :,", "+ strftime(\"%a, %d %b %Y %H:%M:%S +0000\", gmtime()) + \")", "z_min, z_max = x_min, min(shape[0] - N, x_max), y_min, min(shape[1]", "= perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save: save_volume(final_pred, affine, os.path.join(path,", "hl_pad-y) + vyt - vyf tzf, tzt = max(0, hl_pad-z),", "print (\"=================================== KAMNET3D Loaded =================================\") self.BNET3Dnet.eval() self.BNET3Dnet = self.BNET3Dnet.to(device) #========================================================================================", "z:z+N] high[0, 1, :, :, :] = t2[x:x+N, y:y+N, z:z+N]", "+'CopyImageHeaderInformation '+ t1_path+' '+ mask_path +' '+ mask_path +' 1", "low1[0, 2] + t1[0,0,0], low1[0, 2] + t1ce[0,0,0] # =========================================================================", "np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2])) for slices in tqdm(range(flair_v.shape[2])): flair_slice = np.transpose(flair_v[:,:,slices]) t2_slice", "tumor mask (numpy array, output of ANTs pieline) \"\"\" t1_v", "saves the prediction in the save directory in the patients", "tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 1, txf:txt, tyf:tyt,", "DeepBrainSeg] (\" + strftime(\"%a, %d %b %Y %H:%M:%S +0000\", gmtime())", 
"t1[0,0,0], high[0, 2] + t1ce[0,0,0] low[0, 0], low[0, 1], low[0,", "KAMNET3D Loaded =================================\") self.BNET3Dnet.eval() self.BNET3Dnet = self.BNET3Dnet.to(device) #======================================================================================== # Tir3D", "vyf tzf, tzt = max(0, ll_pad-z), max(0, ll_pad-z) + vzt", "z in range(z_min, z_max, N//2): high = np.zeros((1, 4, N,", "low[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] low[0,", "transformList.append(normalize) transformSequence=transforms.Compose(transformList) generated_output = np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2])) for slices in tqdm(range(flair_volume.shape[2])): flair_slice", "self.ABLnet(transformed_array).detach().cpu().numpy()# 3 x 240 x 240 generated_output_logits[:,:,:, slices] = logits.transpose(0,", "self.ABLnet = self.ABLnet.to(device) #======================================================================================== # Tir2D net....................... 
from .models.modelTir2D import", "quick: True (just evaluates on Dual path network (BNet3D) else", "hl_pad-z), max(0, hl_pad-z) + vzt - vzf high[0, 0, txf:txt,", "final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionMnet_logits]) final_prediction_logits =", "+'ImageMath 3 '+ mask_path +' MD '+ mask_path +' 1')", "False, ants_path = bin_path): device = torch.device(\"cuda:0\" if torch.cuda.is_available() else", "= FCDenseNet57(self.Mnclasses) ckpt = torch.load(ckpt_tir2D, map_location=map_location) self.MNET2D.load_state_dict(ckpt['state_dict']) print (\"=================================== MNET2D", "brain_mask = brain, whole tumor mask (numpy array, output of", "+ t1[0,0,0], high[0, 2] + t1ce[0,0,0] low[0, 0], low[0, 1],", "self.MNET2D = FCDenseNet57(self.Mnclasses) ckpt = torch.load(ckpt_tir2D, map_location=map_location) self.MNET2D.load_state_dict(ckpt['state_dict']) print (\"===================================", "= max(0, y-ll_pad), min(shape[1], y+lr_pad) vzf, vzt = max(0, z-ll_pad),", "hl_pad = (high_res_size - prediction_size)//2 hr_pad = hl_pad + prediction_size", "high[0, 0] + flair[0,0,0], high[0, 1] + t2[0,0,0], high[0, 2]", "output of ABLnet N = patch size during inference \"\"\"", "torch.load(ckpt_tir3D, map_location=map_location) self.Tir3Dnet.load_state_dict(ckpt['state_dict']) print (\"================================== TIRNET2D Loaded =================================\") self.Tir3Dnet.eval() self.Tir3Dnet", "np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array", "t2, flair, brain_mask, mask) final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2, flair,", "brain_mask) t2_v = normalize(t2_v, brain_mask) flair_v = normalize(flair_v, brain_mask) 
generated_output_logits", "- N, y_max), z_min, min(shape[2] - N, z_max) with torch.no_grad():", "(range(y_min, y_max - prediction_size, prediction_size)): for z in (range(z_min, z_max", "if save_path: os.makedirs(save_path, exist_ok=True) save_volume(final_pred, affine, os.path.join(save_path, 'DeepBrainSeg_Prediction')) return final_pred", "volume path (str) saves the mask in the same location", "gmtime()) + \") Working on: \", path) brain_mask = self.get_ants_mask(os.path.join(path,", "batch_size final_prediction = np.zeros((self.T3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max, y_min,", "if save True saves the prediction in the save directory", "tqdm import pdb import os from ..helpers.helper import * from", "classification 4. Tir3Dnet 57 layered 3D convolutional network for inner", "\"\"\" ABLnetwork output, finds the brain, Whole tumor region t1_v", "np.zeros((1, 4, resize_to, resize_to, resize_to)) high[0, 0], high[0, 1], high[0,", "get_segmentation(self, t1_path, t2_path, t1ce_path, flair_path, save_path = None): \"\"\" Generates", "self.inner_class_classification_with_logits_NCube(t1, t1ce, t2, flair, brain_mask, mask) final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1, t1ce,", "brain_mask) mask = np.swapaxes(mask,1, 0) if not self.quick: final_predictionTir3D_logits =", "given sequence of patient data. 
to main platform for segmentation", "= nib.load(flair_path).affine brain_mask = self.get_ants_mask(t2_path) mask = self.get_localization(t1, t1ce, t2,", "path.split(\"/\")[-1] + \"_\" flair = nib.load(os.path.join(path, name + 'flair.nii.gz')).get_data() t1", "z_max - prediction_size, prediction_size)): high = np.zeros((1, 4, high_res_size, high_res_size,", "vyf:vyt, vzf:vzt] high[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt,", "the graph saved_parms=torch.load(ckpt_ABL, map_location=map_location) self.ABLnet.load_state_dict(saved_parms['state_dict']) ## fill the model with", "in BraTs format if save True saves the prediction in", "tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= low1[0] =", "brain, Whole tumor region t1_v = t1 volume (numpy array)", "transformList = [] transformList.append(transforms.ToTensor()) transformList.append(normalize) transformSequence=transforms.Compose(transformList) generated_output = np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2])) for", "y_min, y_max, z_min, z_max = bbox(mask, pad = prediction_size) #", "\"\"\" output of 2D tiramisu model (MNet) \"\"\" normalize =", "self.get_ants_mask(os.path.join(path, name + 't2.nii.gz')) # brain_mask = get_brain_mask(t1) mask =", "final_prediction_array = np.array([final_predictionMnet_logits]) final_prediction_logits = combine_logits_AM(final_prediction_array) final_pred = postprocessing_pydensecrf(final_prediction_logits) final_pred", "torch.load(ckpt_BNET3D, map_location=map_location) self.BNET3Dnet.load_state_dict(ckpt['state_dict']) print (\"=================================== KAMNET3D Loaded =================================\") self.BNET3Dnet.eval() self.BNET3Dnet", "mask in the same location as t1 data directory returns:", "adjust_classes(final_pred) if save: save_volume(final_pred, affine, os.path.join(path, 'DeepBrainSeg_Prediction')) return final_pred #", 
"high_res_size = prediction_size + 16 resize_to = int(prediction_size ** 0.5)", "t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= low1[0] = [resize(low[0, i, :,", "as np import nibabel as nib from torch.autograd import Variable", "to exclude batch_size final_prediction = np.zeros((self.T3Dnclasses, shape[0], shape[1], shape[2])) x_min,", "region t1_v = t1 volume (numpy array) t1c_v = t1c", "1. ABLnet for reducing false positives outside the brain Air", "pred.numpy() final_prediction[:, x:x+prediction_size, y:y+prediction_size, z:z+prediction_size] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction)", "%H:%M:%S +0000\", gmtime()) + \") Working on: \", path) brain_mask", "quick self.ants_path = ants_path def get_ants_mask(self, t1_path): \"\"\" We make", "= np.transpose(flair_v[:,:,slices]) t2_slice = np.transpose(t2_v[:,:,slices]) t1ce_slice = np.transpose(t1c_v[:,:,slices]) t1_slice =", "transformed_array = transformed_array.unsqueeze(0) ## neccessary if batch size == 1", "tyf, tyt = max(0, hl_pad-y), max(0, hl_pad-y) + vyt -", "= transformSequence(array) transformed_array = transformed_array.unsqueeze(0) transformed_array = transformed_array.to(self.device) outs =", "torch.nn.functional.softmax(self.BNET3Dnet(high, low1, pred_size=prediction_size).detach().cpu()) pred = pred.numpy() final_prediction[:, x:x+prediction_size, y:y+prediction_size, z:z+prediction_size]", "= nib.load(os.path.join(path, name + 't2.nii.gz')).get_data() affine= nib.load(os.path.join(path, name + 'flair.nii.gz')).affine", "t2[x:x+N, y:y+N, z:z+N] high[0, 2, :, :, :] = t1[x:x+N,", "evaluates on Dual path network (BNet3D) else copmutes an ensumble", "class classification more on training details and network information: (https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>)", "self.BNET3Dnet.eval() self.BNET3Dnet = self.BNET3Dnet.to(device) 
#======================================================================================== # Tir3D model................... from .models.modelTir3D", "= self.get_ants_mask(os.path.join(path, name + 't2.nii.gz')) # brain_mask = get_brain_mask(t1) mask", "params print (\"=================================== ABLNET2D Loaded =================================\") self.ABLnet.eval() self.ABLnet = self.ABLnet.to(device)", "'t2.nii.gz')).get_data() affine= nib.load(os.path.join(path, name + 'flair.nii.gz')).affine print (\"[INFO: DeepBrainSeg] (\"", "ckpt_ABL = os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar') #======================================================================================== # air brain lesion segmentation..............", "2] + t1[0,0,0], low[0, 2] + t1ce[0,0,0] low1[0, 0], low1[0,", "= Variable(torch.from_numpy(high)).to(self.device).float() low1 = Variable(torch.from_numpy(low1)).to(self.device).float() pred = torch.nn.functional.softmax(self.BNET3Dnet(high, low1, pred_size=prediction_size).detach().cpu())", "= self.get_localization(t1, t1ce, t2, flair, brain_mask) # mask = np.swapaxes(mask,1,", "vyf:vyt, vzf:vzt] low[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt,", "prediction in the save directory in the patients data path", "main platform for segmentation mask estimation one for the patient", "resize_to, resize_to)) high[0, 0], high[0, 1], high[0, 2], high[0, 3]", "get_localization(self, t1_v, t1c_v, t2_v, flair_v, brain_mask): \"\"\" ABLnetwork output, finds", "os.system(self.ants_path +'ThresholdImage 3 '+ mask_path +' '+ mask_path +' 0.01", "to exclude batch_size final_prediction = np.zeros((self.B3Dnclasses, shape[0], shape[1], shape[2])) x_min,", "\"\"\" Generates segmentation for the data not in brats format", "vzt = max(0, z-hl_pad), min(shape[2], z+hr_pad) txf, txt = max(0,", "vxt - vxf tyf, tyt = max(0, hl_pad-y), max(0, hl_pad-y)", "vzf:vzt] high[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, 
vyf:vyt, vzf:vzt]", "ll_pad-z), max(0, ll_pad-z) + vzt - vzf low[0, 0, txf:txt,", "txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 2, txf:txt,", "= t1[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 3, txf:txt, tyf:tyt, tzf:tzt] =", "'+ mask_path +' ME '+ mask_path +' 1') os.system(self.ants_path +'CopyImageHeaderInformation", "layered) 2. BNet3Dnet 3D network for inner class classification Dual", "t1ce, t2, flair, brain_mask) mask = np.swapaxes(mask,1, 0) if not", "tiramisu model (MNet) \"\"\" normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229,", "np.zeros((self.T3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max, y_min, y_max, z_min, z_max", "MNET2D Loaded ===================================\") self.MNET2D.eval() self.MNET2D = self.MNET2D.to(device) #======================================================================================== if not", "t1 = normalize(t1, brain_mask) t1ce = normalize(t1ce, brain_mask) t2 =", "'t1ce.nii.gz')).get_data() t2 = nib.load(os.path.join(path, name + 't2.nii.gz')).get_data() affine= nib.load(os.path.join(path, name", "'mask.nii.gz') os.system(self.ants_path +'ImageMath 3 '+ mask_path +' Normalize '+ t1_path)", "+ 16 low_res_size = int(51*resize_to/19) hl_pad = (high_res_size - prediction_size)//2", "= bbox(mask, pad = N) x_min, x_max, y_min, y_max, z_min,", "ll_pad-x) + vxt - vxf tyf, tyt = max(0, ll_pad-y),", "= device #======================================================================================== ckpt_tir2D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar') ckpt_tir3D = os.path.join(home,", "min(shape[2], z+lr_pad) txf, txt = max(0, ll_pad-x), max(0, ll_pad-x) +", "brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair).transpose(0, 2, 1, 3)", "# brain_mask = get_brain_mask(t1) mask = self.get_localization(t1, t1ce, t2, flair,", "z_min, z_max = bbox(mask, pad = N) x_min, x_max, y_min,", "ll_pad-y) + vyt - vyf 
tzf, tzt = max(0, ll_pad-z),", "y_max - prediction_size, prediction_size)): for z in (range(z_min, z_max -", "True): \"\"\" Generates segmentation for the data in BraTs format", "self.T3Dnclasses = 5 self.Tir3Dnet = FCDenseNet57(self.T3Dnclasses) ckpt = torch.load(ckpt_tir3D, map_location=map_location)", "array[:,:,0] = flair_slice array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice array", "3 '+ mask_path +' MD '+ mask_path +' 1') os.system(self.ants_path", "data. to main platform for segmentation mask estimation one for", "= ants_path def get_ants_mask(self, t1_path): \"\"\" We make use of", "3D network for inner class classification Dual Path way network", "high = np.zeros((1, 4, N, N, N)) high[0, 0, :,", "prediction_size)): for y in (range(y_min, y_max - prediction_size, prediction_size)): for", "name + 't2.nii.gz')) # brain_mask = get_brain_mask(t1) mask = self.get_localization(t1,", "vzt - vzf low[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt,", "in brats format if save_path provided function saves the prediction", "strftime from tqdm import tqdm import pdb import os from", "final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save: save_volume(final_pred, affine,", "low[0, 1] + t2[0,0,0], low[0, 2] + t1[0,0,0], low[0, 2]", "final_pred = apply_argmax_to_logits(generated_output_logits) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes_air_brain_tumour(np.uint8(final_pred)) return", "= self.MNET2D.to(device) #======================================================================================== if not quick: # BrainNet3D model...................... 
from", "ckpt = torch.load(ckpt_tir2D, map_location=map_location) self.MNET2D.load_state_dict(ckpt['state_dict']) print (\"=================================== MNET2D Loaded ===================================\")", "self.BNET3Dnet.to(device) #======================================================================================== # Tir3D model................... from .models.modelTir3D import FCDenseNet57 self.T3Dnclasses", "batch_size final_prediction = np.zeros((self.B3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max, y_min,", "nib.load(t2_path).get_data() t1ce = nib.load(t1ce_path).get_data() flair = nib.load(flair_path).get_data() affine = nib.load(flair_path).affine", "brain Air Brain Lesson model (2D model, 103 layered) 2.", "z:z+N] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_DualPath(self,", "one for the patient data in brats format other with", "t1_v = normalize(t1_v, brain_mask) t1c_v = normalize(t1c_v, brain_mask) t2_v =", "brats format if save_path provided function saves the prediction with", "normalize(flair_v, brain_mask) generated_output_logits = np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2])) for slices in tqdm(range(flair_v.shape[2])):", "combine_logits_AM(final_prediction_array) final_pred = postprocessing_pydensecrf(final_prediction_logits) final_pred = combine_mask_prediction(mask, final_pred) final_pred =", "max(0, ll_pad-y), max(0, ll_pad-y) + vyt - vyf tzf, tzt", "brain_mask) t1ce = normalize(t1ce, brain_mask) t2 = normalize(t2, brain_mask) flair", "ensumble over all four networks \"\"\" def __init__(self, quick =", "nib.load(os.path.join(path, name + 'flair.nii.gz')).affine print (\"[INFO: DeepBrainSeg] (\" + strftime(\"%a,", "# obtained by aspect ratio calculation high_res_size = prediction_size +", "Brain Lesson model (2D model, 103 layered) 2. 
BNet3Dnet 3D", "prediction_size)): for z in (range(z_min, z_max - prediction_size, prediction_size)): high", "size \"\"\" t1 = normalize(t1, brain_mask) t1ce = normalize(t1ce, brain_mask)", "brain_mask = self.get_ants_mask(t2_path) mask = self.get_localization(t1, t1ce, t2, flair, brain_mask)", "transformed_array = transformed_array.unsqueeze(0) transformed_array = transformed_array.to(self.device) outs = torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy() outs", "flair_v = normalize(flair_v, brain_mask) generated_output_logits = np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2])) for slices", "flair = nib.load(flair_path).get_data() affine = nib.load(flair_path).affine brain_mask = self.get_ants_mask(t2_path) mask", "y_max, z_min, z_max = bbox(mask, pad = N) x_min, x_max,", "Normalize '+ t1_path) os.system(self.ants_path +'ThresholdImage 3 '+ mask_path +' '+", "brain_mask, mask=None, prediction_size = 9): \"\"\" output of BNet3D prediction_size", "4, N, N, N)) high[0, 0, :, :, :] =", "= convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_2D(self, t1ce_volume, t2_volume, flair_volume): \"\"\"", "affine, os.path.join(save_path, 'DeepBrainSeg_Prediction')) return final_pred def get_segmentation_brats(self, path, save =", "+'ImageMath 3 '+ mask_path +' Normalize '+ t1_path) os.system(self.ants_path +'ThresholdImage", "+ vxt - vxf tyf, tyt = max(0, ll_pad-y), max(0,", "y-hl_pad), min(shape[1], y+hr_pad) vzf, vzt = max(0, z-hl_pad), min(shape[2], z+hr_pad)", "__init__(self, quick = False, ants_path = bin_path): device = torch.device(\"cuda:0\"", "+' '+ mask_path +' 1 1 1') mask = np.uint8(nib.load(mask_path).get_data())", "brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionTir3D_logits,", "get_ants_mask(self, t1_path): \"\"\" We make use of 
ants framework for", "= nib.load(t1ce_path).get_data() flair = nib.load(flair_path).get_data() affine = nib.load(flair_path).affine brain_mask =", "+ 't1.nii.gz')).get_data() t1ce = nib.load(os.path.join(path, name + 't1ce.nii.gz')).get_data() t2 =", "self.B3Dnclasses = 5 self.BNET3Dnet = BrainNet_3D_Inception() ckpt = torch.load(ckpt_BNET3D, map_location=map_location)", "= t2[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 2, txf:txt, tyf:tyt, tzf:tzt] =", "- N, z_max) with torch.no_grad(): for x in tqdm(range(x_min, x_max,", "1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 2,", "mask_path +' 0.01 1') os.system(self.ants_path +'ImageMath 3 '+ mask_path +'", "generated_output = np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2])) for slices in tqdm(range(flair_volume.shape[2])): flair_slice = scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices]))", "from .models.modelTir2D import FCDenseNet57 self.Mnclasses = 4 self.MNET2D = FCDenseNet57(self.Mnclasses)", "tzf, tzt = max(0, hl_pad-z), max(0, hl_pad-z) + vzt -", "tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 3, txf:txt, tyf:tyt,", "t1_path+' '+ mask_path +' '+ mask_path +' 1 1 1')", "torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu()) pred = pred.data.numpy() final_prediction[:, x:x+N, y:y+N, z:z+N] = pred[0]", "= self.ABLnet(transformed_array).detach().cpu().numpy()# 3 x 240 x 240 generated_output_logits[:,:,:, slices] =", "bin_path): device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") # device", "t2, flair) final_prediction_array = np.array([final_predictionMnet_logits]) final_prediction_logits = combine_logits_AM(final_prediction_array) final_pred =", "t2_slice array[:,:,2] = t1ce_slice array[:,:,3] = t1_slice transformed_array = torch.from_numpy(convert_image(array)).float()", "the patient data in brats format other with any random", "z_min, z_max = bbox(mask, pad = prediction_size) # obtained by", 
"int(51*resize_to/19) hl_pad = (high_res_size - prediction_size)//2 hr_pad = hl_pad +", "gmtime, strftime from tqdm import tqdm import pdb import os", "= t1[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 3, txf:txt, tyf:tyt, tzf:tzt] =", "skull stripping t1_path: t1 volume path (str) saves the mask", "in tqdm(range(x_min, x_max, N//2)): for y in range(y_min, y_max, N//2):", "way network 3. MNet2D 57 layered convolutional network for inner", "- prediction_size, prediction_size)): high = np.zeros((1, 4, high_res_size, high_res_size, high_res_size))", "+ flair[0,0,0], low[0, 1] + t2[0,0,0], low[0, 2] + t1[0,0,0],", "during inference \"\"\" t1 = normalize(t1, brain_mask) t1ce = normalize(t1ce,", "103 layered) 2. BNet3Dnet 3D network for inner class classification", "vyt - vyf tzf, tzt = max(0, ll_pad-z), max(0, ll_pad-z)", "affine = nib.load(flair_path).affine brain_mask = self.get_ants_mask(t2_path) mask = self.get_localization(t1, t1ce,", "ll_pad + prediction_size for x in tqdm(range(x_min, x_max - prediction_size,", "t2 volume (numpy array) flair_v = flair volume (numpy array)", "final_pred = adjust_classes(final_pred) if save_path: os.makedirs(save_path, exist_ok=True) save_volume(final_pred, affine, os.path.join(save_path,", "nibabel as nib from torch.autograd import Variable from skimage.transform import", "self.ABLnet.eval() self.ABLnet = self.ABLnet.to(device) #======================================================================================== # Tir2D net....................... 
from .models.modelTir2D", "map_location=map_location) self.Tir3Dnet.load_state_dict(ckpt['state_dict']) print (\"================================== TIRNET2D Loaded =================================\") self.Tir3Dnet.eval() self.Tir3Dnet =", "t2, flair, brain_mask, mask=None, prediction_size = 9): \"\"\" output of", "+ prediction_size ll_pad = (low_res_size - prediction_size)//2 lr_pad = ll_pad", "vzf:vzt] low[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt]", "N, z_max) with torch.no_grad(): for x in tqdm(range(x_min, x_max, N//2)):", "= self.Tir3Dnet.to(device) #======================================================================================== self.device = device self.quick = quick self.ants_path", "resize_to = int(prediction_size ** 0.5) + 16 low_res_size = int(51*resize_to/19)", "low1[0] = [resize(low[0, i, :, :, :], (resize_to, resize_to, resize_to))", "model, 103 layered) 2. BNet3Dnet 3D network for inner class", "self.get_ants_mask(t2_path) mask = self.get_localization(t1, t1ce, t2, flair, brain_mask) # mask", "x:x+N, y:y+N, z:z+N] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction", "ll_pad = (low_res_size - prediction_size)//2 lr_pad = ll_pad + prediction_size", "saves the mask in the same location as t1 data", "flair volume (numpy array) brain_mask = brain, whole tumor mask", "transformed_array.unsqueeze(0) ## neccessary if batch size == 1 transformed_array =", "high[0, 2] + t1ce[0,0,0] low[0, 0], low[0, 1], low[0, 2],", "t1ce, t2, flair, brain_mask, mask=None, prediction_size = 9): \"\"\" output", "with trained params print (\"=================================== ABLNET2D Loaded =================================\") self.ABLnet.eval() self.ABLnet", "low = np.zeros((1, 4, low_res_size, low_res_size, low_res_size)) low1 = np.zeros((1,", "= self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionTir3D_logits, 
final_predictionBNET3D_logits, final_predictionMnet_logits]) else:", "print (\"=================================== ABLNET2D Loaded =================================\") self.ABLnet.eval() self.ABLnet = self.ABLnet.to(device) #========================================================================================", "FCDenseNet57(self.Mnclasses) ckpt = torch.load(ckpt_tir2D, map_location=map_location) self.MNET2D.load_state_dict(ckpt['state_dict']) print (\"=================================== MNET2D Loaded", "four networks \"\"\" def __init__(self, quick = False, ants_path =", "t1_v, t1c_v, t2_v, flair_v, brain_mask): \"\"\" ABLnetwork output, finds the", "Path way network 3. MNet2D 57 layered convolutional network for", "hl_pad-x) + vxt - vxf tyf, tyt = max(0, hl_pad-y),", "N = patch size during inference \"\"\" t1 = normalize(t1,", "0], high[0, 1], high[0, 2], high[0, 3] = high[0, 0]", "high[0, 3, :, :, :] = t1ce[x:x+N, y:y+N, z:z+N] high", "\"cpu\" map_location = device #======================================================================================== ckpt_tir2D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar') ckpt_tir3D", "z:z+N] high[0, 3, :, :, :] = t1ce[x:x+N, y:y+N, z:z+N]", "tzt = max(0, ll_pad-z), max(0, ll_pad-z) + vzt - vzf", "0], low[0, 1], low[0, 2], low[0, 3] = low[0, 0]", "array) t1c_v = t1c volume (numpy array) t2_v = t2", "4. 
Tir3Dnet 57 layered 3D convolutional network for inner class", "low[0, 1], low[0, 2], low[0, 3] = low[0, 0] +", "as t1 data directory returns: maskvolume (numpy uint8 type) \"\"\"", "\"\"\" mask_path = os.path.join(os.path.dirname(t1_path), 'mask.nii.gz') os.system(self.ants_path +'ImageMath 3 '+ mask_path", "name + 't2.nii.gz')).get_data() affine= nib.load(os.path.join(path, name + 'flair.nii.gz')).affine print (\"[INFO:", "map_location=map_location) self.MNET2D.load_state_dict(ckpt['state_dict']) print (\"=================================== MNET2D Loaded ===================================\") self.MNET2D.eval() self.MNET2D =", "if not self.quick: final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1, t1ce, t2, flair, brain_mask,", "else copmutes an ensumble over all four networks \"\"\" def", "generalized skull stripping t1_path: t1 volume path (str) saves the", "z-hl_pad), min(shape[2], z+hr_pad) txf, txt = max(0, hl_pad-x), max(0, hl_pad-x)", "0.406], [0.229, 0.224, 0.225]) transformList = [] transformList.append(transforms.ToTensor()) transformList.append(normalize) transformSequence=transforms.Compose(transformList)", "transformed_array.unsqueeze(0) transformed_array = transformed_array.to(self.device) outs = torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy() outs = np.swapaxes(generated_output,1,", "self.MNET2D = self.MNET2D.to(device) #======================================================================================== if not quick: # BrainNet3D model......................", "prediction_size ll_pad = (low_res_size - prediction_size)//2 lr_pad = ll_pad +", "t1ce, t2, flair, brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair)", "y:y+N, z:z+N] high = Variable(torch.from_numpy(high)).to(self.device).float() pred = torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu()) pred =", "x 240 x 240 generated_output_logits[:,:,:, slices] = 
logits.transpose(0, 1, 3,", "os.system(self.ants_path +'ImageMath 3 '+ mask_path +' ME '+ mask_path +'", "flair, brain_mask, mask) final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2, flair, brain_mask,", "-*- # # author: <NAME> # contact: <EMAIL> import torch", "+' 1 1 1') mask = np.uint8(nib.load(mask_path).get_data()) return mask def", "mask \"\"\" t1 = nib.load(t1_path).get_data() t2 = nib.load(t2_path).get_data() t1ce =", "\"\"\" normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) transformList", "#======================================================================================== # air brain lesion segmentation.............. from .models.modelABL import FCDenseNet103", "2, 1, 3) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits", "\"cpu\") # device = \"cpu\" map_location = device #======================================================================================== ckpt_tir2D", "perform_postprocessing(final_pred) final_pred = adjust_classes(final_pred) if save_path: os.makedirs(save_path, exist_ok=True) save_volume(final_pred, affine,", "0] + flair[0,0,0], high[0, 1] + t2[0,0,0], high[0, 2] +", "prediction_size = 9): \"\"\" output of BNet3D prediction_size = mid", "mask_path = os.path.join(os.path.dirname(t1_path), 'mask.nii.gz') os.system(self.ants_path +'ImageMath 3 '+ mask_path +'", "with any random format step followed for in estimation of", "t2[0,0,0], low[0, 2] + t1[0,0,0], low[0, 2] + t1ce[0,0,0] low1[0,", "(str) saves the mask in the same location as t1", "self.MNET2D.to(device) #======================================================================================== if not quick: # BrainNet3D model...................... 
from .models.model3DBNET", "ANTs pieline) \"\"\" t1_v = normalize(t1_v, brain_mask) t1c_v = normalize(t1c_v,", "## fill the model with trained params print (\"=================================== ABLNET2D", "min(shape[2] - N, z_max) with torch.no_grad(): for x in tqdm(range(x_min,", "normalize(t2, brain_mask) flair = normalize(flair, brain_mask) shape = t1.shape #", "Working on: \", path) brain_mask = self.get_ants_mask(os.path.join(path, name + 't2.nii.gz'))", "os.path.join(path, 'DeepBrainSeg_Prediction')) return final_pred # ======================================================================================== if __name__ == '__main__':", "bin_path = os.path.join('/opt/ANTs/bin/') class tumorSeg(): \"\"\" class performs segmentation for", "z_max = bbox(mask, pad = prediction_size) # obtained by aspect", "========================================================================= vxf, vxt = max(0, x-hl_pad), min(shape[0], x+hr_pad) vyf, vyt", "resize_to)) for i in range(4)] high = Variable(torch.from_numpy(high)).to(self.device).float() low1 =", "tyf, tyt = max(0, ll_pad-y), max(0, ll_pad-y) + vyt -", "max(0, z-hl_pad), min(shape[2], z+hr_pad) txf, txt = max(0, hl_pad-x), max(0,", "= torch.load(ckpt_BNET3D, map_location=map_location) self.BNET3Dnet.load_state_dict(ckpt['state_dict']) print (\"=================================== KAMNET3D Loaded =================================\") self.BNET3Dnet.eval()", "for x in tqdm(range(x_min, x_max - prediction_size, prediction_size)): for y", "txf, txt = max(0, ll_pad-x), max(0, ll_pad-x) + vxt -", ":] = flair[x:x+N, y:y+N, z:z+N] high[0, 1, :, :, :]", "'.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar') ckpt_BNET3D = os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar') ckpt_ABL = os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar') #========================================================================================", "the provided directory returns: 
segmentation mask \"\"\" t1 = nib.load(t1_path).get_data()", "in the save directory in the patients data path returns", "ll_pad-z) + vzt - vzf low[0, 0, txf:txt, tyf:tyt, tzf:tzt]", "same location as t1 data directory returns: maskvolume (numpy uint8", "1') os.system(self.ants_path +'CopyImageHeaderInformation '+ t1_path+' '+ mask_path +' '+ mask_path", "generated_output_logits = np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2])) for slices in tqdm(range(flair_v.shape[2])): flair_slice =", ":] = t1ce[x:x+N, y:y+N, z:z+N] high = Variable(torch.from_numpy(high)).to(self.device).float() pred =", "BrainNet_3D_Inception() ckpt = torch.load(ckpt_BNET3D, map_location=map_location) self.BNET3Dnet.load_state_dict(ckpt['state_dict']) print (\"=================================== KAMNET3D Loaded", "os.path import expanduser home = expanduser(\"~\") #======================================================================================== # prediction functions.....................", "path (str) saves the mask in the same location as", "4, resize_to, resize_to, resize_to)) high[0, 0], high[0, 1], high[0, 2],", "shape = t1.shape # to exclude batch_size final_prediction = np.zeros((self.B3Dnclasses,", "mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits,", "import expanduser home = expanduser(\"~\") #======================================================================================== # prediction functions..................... 
bin_path", "ABLnet for reducing false positives outside the brain Air Brain", "t1 data directory returns: maskvolume (numpy uint8 type) \"\"\" mask_path", "vzf:vzt] high[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt]", "np.transpose(t2_v[:,:,slices]) t1ce_slice = np.transpose(t1c_v[:,:,slices]) t1_slice = np.transpose(t1_v[:,:,slices]) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],4))", "final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes_air_brain_tumour(np.uint8(final_pred)) return np.uint8(final_pred) def inner_class_classification_with_logits_NCube(self,", "vxf tyf, tyt = max(0, hl_pad-y), max(0, hl_pad-y) + vyt", "import pdb import os from ..helpers.helper import * from os.path", "low1[0, 1], low1[0, 2], low1[0, 3] = low1[0, 0] +", "home = expanduser(\"~\") #======================================================================================== # prediction functions..................... bin_path = os.path.join('/opt/ANTs/bin/')", "- vzf low[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt,", "the data not in brats format if save_path provided function", "t2[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt,", "240 x 240 generated_output_logits[:,:,:, slices] = logits.transpose(0, 1, 3, 2)", "from .models.model3DBNET import BrainNet_3D_Inception self.B3Dnclasses = 5 self.BNET3Dnet = BrainNet_3D_Inception()", ":, :] = t1[x:x+N, y:y+N, z:z+N] high[0, 3, :, :,", "flair).transpose(0, 2, 1, 3) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else:", "segmentation for the data in BraTs format if save True", "+ vxt - vxf tyf, tyt = max(0, hl_pad-y), max(0,", "+ vyt - vyf tzf, tzt = max(0, ll_pad-z), max(0,", "vyt = max(0, y-ll_pad), min(shape[1], y+lr_pad) vzf, vzt = max(0,", "= x_min, min(shape[0] - N, x_max), y_min, min(shape[1] - N,", "np.zeros((1, 4, N, N, N)) high[0, 0, :, :, :]", "on: \", 
path) brain_mask = self.get_ants_mask(os.path.join(path, name + 't2.nii.gz')) #", "for in estimation of segmentation mask 1. ABLnet for reducing", "returns: segmentation mask \"\"\" t1 = nib.load(t1_path).get_data() t2 = nib.load(t2_path).get_data()", "+0000\", gmtime()) + \") Working on: \", path) brain_mask =", "model (tir3Dnet) mask = numpy array output of ABLnet N", "of ANTs pieline) \"\"\" t1_v = normalize(t1_v, brain_mask) t1c_v =", "prediction_size)//2 lr_pad = ll_pad + prediction_size for x in tqdm(range(x_min,", "return outs def get_segmentation(self, t1_path, t2_path, t1ce_path, flair_path, save_path =", "y_max), z_min, min(shape[2] - N, z_max) with torch.no_grad(): for x", "tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 2, txf:txt, tyf:tyt,", "x_max, y_min, y_max, z_min, z_max = bbox(mask, pad = N)", "output, finds the brain, Whole tumor region t1_v = t1", "nib.load(os.path.join(path, name + 't1.nii.gz')).get_data() t1ce = nib.load(os.path.join(path, name + 't1ce.nii.gz')).get_data()", "transformed_array.to(self.device) logits = self.ABLnet(transformed_array).detach().cpu().numpy()# 3 x 240 x 240 generated_output_logits[:,:,:,", ":, :, :] = t1[x:x+N, y:y+N, z:z+N] high[0, 3, :,", "in estimation of segmentation mask 1. 
ABLnet for reducing false", "txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 2, txf:txt,", "= 5 self.Tir3Dnet = FCDenseNet57(self.T3Dnclasses) ckpt = torch.load(ckpt_tir3D, map_location=map_location) self.Tir3Dnet.load_state_dict(ckpt['state_dict'])", "+ \"_\" flair = nib.load(os.path.join(path, name + 'flair.nii.gz')).get_data() t1 =", "[resize(low[0, i, :, :, :], (resize_to, resize_to, resize_to)) for i", "txf, txt = max(0, hl_pad-x), max(0, hl_pad-x) + vxt -", "print (\"[INFO: DeepBrainSeg] (\" + strftime(\"%a, %d %b %Y %H:%M:%S", "t1ce = nib.load(os.path.join(path, name + 't1ce.nii.gz')).get_data() t2 = nib.load(os.path.join(path, name", "+ \") Working on: \", path) brain_mask = self.get_ants_mask(os.path.join(path, name", "Variable(torch.from_numpy(high)).to(self.device).float() low1 = Variable(torch.from_numpy(low1)).to(self.device).float() pred = torch.nn.functional.softmax(self.BNET3Dnet(high, low1, pred_size=prediction_size).detach().cpu()) pred", "high[0, 2], high[0, 3] = high[0, 0] + flair[0,0,0], high[0,", "path returns : segmentation mask \"\"\" name = path.split(\"/\")[-1] +", "in tqdm(range(flair_v.shape[2])): flair_slice = np.transpose(flair_v[:,:,slices]) t2_slice = np.transpose(t2_v[:,:,slices]) t1ce_slice =", "+ 't2.nii.gz')) # brain_mask = get_brain_mask(t1) mask = self.get_localization(t1, t1ce,", "t2, flair, brain_mask) mask = np.swapaxes(mask,1, 0) if not self.quick:", "model (MNet) \"\"\" normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224,", "estimation of segmentation mask 1. 
ABLnet for reducing false positives", "in (range(y_min, y_max - prediction_size, prediction_size)): for z in (range(z_min,", "= path.split(\"/\")[-1] + \"_\" flair = nib.load(os.path.join(path, name + 'flair.nii.gz')).get_data()", "from tqdm import tqdm import pdb import os from ..helpers.helper", "y_max, z_min, z_max = bbox(mask, pad = prediction_size) # obtained", "shape[0], shape[1], shape[2])) x_min, x_max, y_min, y_max, z_min, z_max =", "output of 3D tiramisu model (tir3Dnet) mask = numpy array", "ants framework for generalized skull stripping t1_path: t1 volume path", "flair, brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array =", "True saves the prediction in the save directory in the", "= np.transpose(t2_v[:,:,slices]) t1ce_slice = np.transpose(t1c_v[:,:,slices]) t1_slice = np.transpose(t1_v[:,:,slices]) array =", "+ prediction_size for x in tqdm(range(x_min, x_max - prediction_size, prediction_size)):", "= os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar') ckpt_BNET3D = os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar') ckpt_ABL = os.path.join(home,", "torch import SimpleITK as sitk import numpy as np import", "min(shape[1], y+hr_pad) vzf, vzt = max(0, z-hl_pad), min(shape[2], z+hr_pad) txf,", "resize_to, resize_to, resize_to)) high[0, 0], high[0, 1], high[0, 2], high[0,", "'+ t1_path) os.system(self.ants_path +'ThresholdImage 3 '+ mask_path +' '+ mask_path", "+ t1[0,0,0], low[0, 2] + t1ce[0,0,0] low1[0, 0], low1[0, 1],", "= combine_logits_AM(final_prediction_array) final_pred = postprocessing_pydensecrf(final_prediction_logits) final_pred = combine_mask_prediction(mask, final_pred) final_pred", "t1ce[x:x+N, y:y+N, z:z+N] high = Variable(torch.from_numpy(high)).to(self.device).float() pred = torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu()) pred", "= self.ABLnet.to(device) 
#======================================================================================== # Tir2D net....................... from .models.modelTir2D import FCDenseNet57", "postprocessing_pydensecrf(final_prediction_logits) final_pred = combine_mask_prediction(mask, final_pred) final_pred = perform_postprocessing(final_pred) final_pred =", "(numpy uint8 type) \"\"\" mask_path = os.path.join(os.path.dirname(t1_path), 'mask.nii.gz') os.system(self.ants_path +'ImageMath", "'t1.nii.gz')).get_data() t1ce = nib.load(os.path.join(path, name + 't1ce.nii.gz')).get_data() t2 = nib.load(os.path.join(path,", "= flair[x:x+N, y:y+N, z:z+N] high[0, 1, :, :, :] =", "array[:,:,3] = t1_slice transformed_array = torch.from_numpy(convert_image(array)).float() transformed_array = transformed_array.unsqueeze(0) ##", "- vxf tyf, tyt = max(0, hl_pad-y), max(0, hl_pad-y) +", "= t1ce_slice array[:,:,3] = t1_slice transformed_array = torch.from_numpy(convert_image(array)).float() transformed_array =", "brain, whole tumor mask (numpy array, output of ANTs pieline)", "and network information: (https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>) ========================= quick: True (just evaluates on", "= t1ce[vxf:vxt, vyf:vyt, vzf:vzt] # ========================================================================= low1[0] = [resize(low[0, i,", "= np.transpose(t1_v[:,:,slices]) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],4)) array[:,:,0] = flair_slice array[:,:,1] =", "def inner_class_classification_with_logits_NCube(self, t1, t1ce, t2, flair, brain_mask, mask, N =", "MNet2D 57 layered convolutional network for inner class classification 4.", "high[0, 2, :, :, :] = t1[x:x+N, y:y+N, z:z+N] high[0,", "high[0, 2] + t1[0,0,0], high[0, 2] + t1ce[0,0,0] low[0, 0],", "lesion segmentation.............. 
from .models.modelABL import FCDenseNet103 self.ABLnclasses = 3 self.ABLnet", "return final_prediction def inner_class_classification_with_logits_2D(self, t1ce_volume, t2_volume, flair_volume): \"\"\" output of", "3 '+ mask_path +' ME '+ mask_path +' 1') os.system(self.ants_path", "high[0, 1], high[0, 2], high[0, 3] = high[0, 0] +", "return final_pred def get_segmentation_brats(self, path, save = True): \"\"\" Generates", "ABLnetwork output, finds the brain, Whole tumor region t1_v =", "y in (range(y_min, y_max - prediction_size, prediction_size)): for z in", "max(0, ll_pad-y) + vyt - vyf tzf, tzt = max(0,", "#======================================================================================== # prediction functions..................... bin_path = os.path.join('/opt/ANTs/bin/') class tumorSeg(): \"\"\"", "use of ants framework for generalized skull stripping t1_path: t1", "N, N, N)) high[0, 0, :, :, :] = flair[x:x+N,", "range(4)] high = Variable(torch.from_numpy(high)).to(self.device).float() low1 = Variable(torch.from_numpy(low1)).to(self.device).float() pred = torch.nn.functional.softmax(self.BNET3Dnet(high,", "= torch.nn.functional.softmax(self.BNET3Dnet(high, low1, pred_size=prediction_size).detach().cpu()) pred = pred.numpy() final_prediction[:, x:x+prediction_size, y:y+prediction_size,", "= flair_slice array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice array =", "= scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices])) t1ce_slice = scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices])) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],3)) array[:,:,0] =", "Tir3D model................... 
from .models.modelTir3D import FCDenseNet57 self.T3Dnclasses = 5 self.Tir3Dnet", "vzf:vzt] low[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt]", "## intialize the graph saved_parms=torch.load(ckpt_ABL, map_location=map_location) self.ABLnet.load_state_dict(saved_parms['state_dict']) ## fill the", "flair[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt,", "max(0, hl_pad-y), max(0, hl_pad-y) + vyt - vyf tzf, tzt", "final_predictionMnet_logits]) else: final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionMnet_logits])", "pred = pred.numpy() final_prediction[:, x:x+prediction_size, y:y+prediction_size, z:z+prediction_size] = pred[0] final_prediction", "save_path: os.makedirs(save_path, exist_ok=True) save_volume(final_pred, affine, os.path.join(save_path, 'DeepBrainSeg_Prediction')) return final_pred def", "BrainNet3D model...................... from .models.model3DBNET import BrainNet_3D_Inception self.B3Dnclasses = 5 self.BNET3Dnet", "- prediction_size)//2 lr_pad = ll_pad + prediction_size for x in", "'flair.nii.gz')).affine print (\"[INFO: DeepBrainSeg] (\" + strftime(\"%a, %d %b %Y", "t1ce, t2, flair, brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair).transpose(0,", "name + 'flair.nii.gz')).affine print (\"[INFO: DeepBrainSeg] (\" + strftime(\"%a, %d", "0.5) + 16 low_res_size = int(51*resize_to/19) hl_pad = (high_res_size -", "-*- coding: utf-8 -*- # # author: <NAME> # contact:", "np.swapaxes(mask,1, 0) if not self.quick: final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1, t1ce, t2,", "1 1 1') mask = np.uint8(nib.load(mask_path).get_data()) return mask def get_localization(self,", "mask_path +' 1') os.system(self.ants_path +'CopyImageHeaderInformation '+ t1_path+' '+ mask_path +'", "t1ce[0,0,0] # 
========================================================================= vxf, vxt = max(0, x-hl_pad), min(shape[0], x+hr_pad)", "vyf:vyt, vzf:vzt] # ========================================================================= vxf, vxt = max(0, x-ll_pad), min(shape[0],", "(\"=================================== MNET2D Loaded ===================================\") self.MNET2D.eval() self.MNET2D = self.MNET2D.to(device) #======================================================================================== if", "5 self.Tir3Dnet = FCDenseNet57(self.T3Dnclasses) ckpt = torch.load(ckpt_tir3D, map_location=map_location) self.Tir3Dnet.load_state_dict(ckpt['state_dict']) print", "+ 'flair.nii.gz')).affine print (\"[INFO: DeepBrainSeg] (\" + strftime(\"%a, %d %b", "net....................... from .models.modelTir2D import FCDenseNet57 self.Mnclasses = 4 self.MNET2D =", "else: final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair) final_prediction_array = np.array([final_predictionMnet_logits]) final_prediction_logits", "nib.load(flair_path).affine brain_mask = self.get_ants_mask(t2_path) mask = self.get_localization(t1, t1ce, t2, flair,", "flair = normalize(flair, brain_mask) shape = t1.shape # to exclude", "3 '+ mask_path +' '+ mask_path +' 0.01 1') os.system(self.ants_path", "save_path = None): \"\"\" Generates segmentation for the data not", "low_res_size, low_res_size, low_res_size)) low1 = np.zeros((1, 4, resize_to, resize_to, resize_to))", "copmutes an ensumble over all four networks \"\"\" def __init__(self,", "t1ce, t2, flair, brain_mask, mask, N = 64): \"\"\" output", "mask = np.swapaxes(mask,1, 0) if not self.quick: final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1,", "for a given sequence of patient data. 
to main platform", "= t2[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 2, txf:txt, tyf:tyt, tzf:tzt] =", "final_prediction_logits = combine_logits_AM(final_prediction_array) final_pred = postprocessing_pydensecrf(final_prediction_logits) final_pred = combine_mask_prediction(mask, final_pred)", "(resize_to, resize_to, resize_to)) for i in range(4)] high = Variable(torch.from_numpy(high)).to(self.device).float()", "= np.swapaxes(mask,1, 0) if not self.quick: final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1, t1ce,", "3, 2) final_pred = apply_argmax_to_logits(generated_output_logits) final_pred = perform_postprocessing(final_pred) final_pred =", "data directory returns: maskvolume (numpy uint8 type) \"\"\" mask_path =", "SimpleITK as sitk import numpy as np import nibabel as", "FCDenseNet57(self.T3Dnclasses) ckpt = torch.load(ckpt_tir3D, map_location=map_location) self.Tir3Dnet.load_state_dict(ckpt['state_dict']) print (\"================================== TIRNET2D Loaded", "1 1') mask = np.uint8(nib.load(mask_path).get_data()) return mask def get_localization(self, t1_v,", "(2D model, 103 layered) 2. 
BNet3Dnet 3D network for inner", "flair_slice array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice array[:,:,3] = t1_slice", "i in range(4)] high = Variable(torch.from_numpy(high)).to(self.device).float() low1 = Variable(torch.from_numpy(low1)).to(self.device).float() pred", "= adjust_classes(final_pred) if save: save_volume(final_pred, affine, os.path.join(path, 'DeepBrainSeg_Prediction')) return final_pred", "self.Tir3Dnet = self.Tir3Dnet.to(device) #======================================================================================== self.device = device self.quick = quick", "shape[1], shape[2])) x_min, x_max, y_min, y_max, z_min, z_max = bbox(mask,", "in (range(z_min, z_max - prediction_size, prediction_size)): high = np.zeros((1, 4,", "= flair[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 1, txf:txt, tyf:tyt, tzf:tzt] =", "convert5class_logitsto_4class(final_prediction) return final_prediction def inner_class_classification_with_logits_DualPath(self, t1, t1ce, t2, flair, brain_mask,", "flair, brain_mask, mask, N = 64): \"\"\" output of 3D", "# ======================================================================================== if __name__ == '__main__': ext = deepSeg(True) ext.get_segmentation_brats('../../sample_volume/Brats18_CBICA_AVG_1/')", "normalize(t1, brain_mask) t1ce = normalize(t1ce, brain_mask) t2 = normalize(t2, brain_mask)", "* from os.path import expanduser home = expanduser(\"~\") #======================================================================================== #", "air brain lesion segmentation.............. 
from .models.modelABL import FCDenseNet103 self.ABLnclasses =", "= normalize(t2_v, brain_mask) flair_v = normalize(flair_v, brain_mask) generated_output_logits = np.empty((self.ABLnclasses,", "high[0, 0], high[0, 1], high[0, 2], high[0, 3] = high[0,", "= transformed_array.unsqueeze(0) ## neccessary if batch size == 1 transformed_array", "tyt = max(0, hl_pad-y), max(0, hl_pad-y) + vyt - vyf", "(tir3Dnet) mask = numpy array output of ABLnet N =", "ll_pad-x), max(0, ll_pad-x) + vxt - vxf tyf, tyt =", "= os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar') ckpt_tir3D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar') ckpt_BNET3D = os.path.join(home,", "transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) transformList = [] transformList.append(transforms.ToTensor())", "z:z+N] high = Variable(torch.from_numpy(high)).to(self.device).float() pred = torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu()) pred = pred.data.numpy()", "FCDenseNet103 self.ABLnclasses = 3 self.ABLnet = FCDenseNet103(n_classes = self.ABLnclasses) ##", "t1 = nib.load(t1_path).get_data() t2 = nib.load(t2_path).get_data() t1ce = nib.load(t1ce_path).get_data() flair", "tqdm(range(flair_v.shape[2])): flair_slice = np.transpose(flair_v[:,:,slices]) t2_slice = np.transpose(t2_v[:,:,slices]) t1ce_slice = np.transpose(t1c_v[:,:,slices])", "vyt - vyf tzf, tzt = max(0, hl_pad-z), max(0, hl_pad-z)", "t1.shape # to exclude batch_size final_prediction = np.zeros((self.B3Dnclasses, shape[0], shape[1],", "strftime(\"%a, %d %b %Y %H:%M:%S +0000\", gmtime()) + \") Working", "t1, t1ce, t2, flair, brain_mask, mask=None, prediction_size = 9): \"\"\"", "provided function saves the prediction with DeepBrainSeg_Prediction.nii.qz name in the", "= ll_pad + prediction_size for x in tqdm(range(x_min, x_max -", "'+ mask_path +' 1') os.system(self.ants_path +'ImageMath 3 '+ mask_path +'", "t1_path) os.system(self.ants_path +'ThresholdImage 3 '+ 
mask_path +' '+ mask_path +'", "t2, flair, brain_mask, mask, N = 64): \"\"\" output of", "(numpy array) flair_v = flair volume (numpy array) brain_mask =", "# BrainNet3D model...................... from .models.model3DBNET import BrainNet_3D_Inception self.B3Dnclasses = 5", "def get_segmentation_brats(self, path, save = True): \"\"\" Generates segmentation for", "vzf:vzt] high[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt]", "FCDenseNet57 self.T3Dnclasses = 5 self.Tir3Dnet = FCDenseNet57(self.T3Dnclasses) ckpt = torch.load(ckpt_tir3D,", "return final_pred # ======================================================================================== if __name__ == '__main__': ext =", "= nib.load(t2_path).get_data() t1ce = nib.load(t1ce_path).get_data() flair = nib.load(flair_path).get_data() affine =", "expanduser(\"~\") #======================================================================================== # prediction functions..................... bin_path = os.path.join('/opt/ANTs/bin/') class tumorSeg():", "array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],4)) array[:,:,0] = flair_slice array[:,:,1] = t2_slice array[:,:,2]", "2, :, :, :] = t1[x:x+N, y:y+N, z:z+N] high[0, 3,", "flair_v.shape[0],flair_v.shape[1],flair_v.shape[2])) for slices in tqdm(range(flair_v.shape[2])): flair_slice = np.transpose(flair_v[:,:,slices]) t2_slice =", "+' ME '+ mask_path +' 1') os.system(self.ants_path +'CopyImageHeaderInformation '+ t1_path+'", "y_max, z_min, z_max = x_min, min(shape[0] - N, x_max), y_min,", "# Tir3D model................... 
from .models.modelTir3D import FCDenseNet57 self.T3Dnclasses = 5", "exist_ok=True) save_volume(final_pred, affine, os.path.join(save_path, 'DeepBrainSeg_Prediction')) return final_pred def get_segmentation_brats(self, path,", "shape[2])) x_min, x_max, y_min, y_max, z_min, z_max = bbox(mask, pad", "np.swapaxes(generated_output,1, 2) return outs def get_segmentation(self, t1_path, t2_path, t1ce_path, flair_path,", "0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt] high[0, 1,", "= transformed_array.unsqueeze(0) transformed_array = transformed_array.to(self.device) outs = torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy() outs =", "DeepBrainSeg_Prediction.nii.qz name in the provided directory returns: segmentation mask \"\"\"", "y:y+N, z:z+N] = pred[0] final_prediction = convert5class_logitsto_4class(final_prediction) return final_prediction def", "normalize(t1c_v, brain_mask) t2_v = normalize(t2_v, brain_mask) flair_v = normalize(flair_v, brain_mask)", "\"\"\" t1_v = normalize(t1_v, brain_mask) t1c_v = normalize(t1c_v, brain_mask) t2_v", "scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices])) t1ce_slice = scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices])) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],3)) array[:,:,0] = flair_slice", "np.zeros((1, 4, high_res_size, high_res_size, high_res_size)) low = np.zeros((1, 4, low_res_size,", "for inner class classification more on training details and network", ".models.modelTir2D import FCDenseNet57 self.Mnclasses = 4 self.MNET2D = FCDenseNet57(self.Mnclasses) ckpt", "prediction_size for x in tqdm(range(x_min, x_max - prediction_size, prediction_size)): for", "prediction_size, prediction_size)): for y in (range(y_min, y_max - prediction_size, prediction_size)):", "+ t1ce[0,0,0] # ========================================================================= vxf, vxt = max(0, x-hl_pad), min(shape[0],", "mask, N = 64): \"\"\" output 
of 3D tiramisu model", "ckpt_tir3D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar') ckpt_BNET3D = os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar') ckpt_ABL =", "import numpy as np import nibabel as nib from torch.autograd", "\"\"\" t1 = normalize(t1, brain_mask) t1ce = normalize(t1ce, brain_mask) t2", "aspect ratio calculation high_res_size = prediction_size + 16 resize_to =", "flair) final_prediction_array = np.array([final_predictionMnet_logits]) final_prediction_logits = combine_logits_AM(final_prediction_array) final_pred = postprocessing_pydensecrf(final_prediction_logits)", "= get_brain_mask(t1) mask = self.get_localization(t1, t1ce, t2, flair, brain_mask) mask", "2) final_pred = apply_argmax_to_logits(generated_output_logits) final_pred = perform_postprocessing(final_pred) final_pred = adjust_classes_air_brain_tumour(np.uint8(final_pred))", "np.transpose(t1_v[:,:,slices]) array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],4)) array[:,:,0] = flair_slice array[:,:,1] = t2_slice", "TIRNET2D Loaded =================================\") self.Tir3Dnet.eval() self.Tir3Dnet = self.Tir3Dnet.to(device) #======================================================================================== self.device =", "= np.zeros((self.B3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max, y_min, y_max, z_min,", "affine, os.path.join(path, 'DeepBrainSeg_Prediction')) return final_pred # ======================================================================================== if __name__ ==", "3D convolutional network for inner class classification more on training", "(high_res_size - prediction_size)//2 hr_pad = hl_pad + prediction_size ll_pad =", "(just evaluates on Dual path network (BNet3D) else copmutes an", "vxf tyf, tyt = max(0, ll_pad-y), max(0, ll_pad-y) + vyt", "ckpt = torch.load(ckpt_tir3D, map_location=map_location) self.Tir3Dnet.load_state_dict(ckpt['state_dict']) print 
(\"================================== TIRNET2D Loaded =================================\")", "= np.array([final_predictionMnet_logits]) final_prediction_logits = combine_logits_AM(final_prediction_array) final_pred = postprocessing_pydensecrf(final_prediction_logits) final_pred =", "Dual Path way network 3. MNet2D 57 layered convolutional network", "data path returns : segmentation mask \"\"\" name = path.split(\"/\")[-1]", "format other with any random format step followed for in", "time import gmtime, strftime from tqdm import tqdm import pdb", "vxf, vxt = max(0, x-hl_pad), min(shape[0], x+hr_pad) vyf, vyt =", "in the patients data path returns : segmentation mask \"\"\"", "coding: utf-8 -*- # # author: <NAME> # contact: <EMAIL>", "t2_path, t1ce_path, flair_path, save_path = None): \"\"\" Generates segmentation for", ":, :, :] = flair[x:x+N, y:y+N, z:z+N] high[0, 1, :,", "4 self.MNET2D = FCDenseNet57(self.Mnclasses) ckpt = torch.load(ckpt_tir2D, map_location=map_location) self.MNET2D.load_state_dict(ckpt['state_dict']) print", "torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu()).numpy() outs = np.swapaxes(generated_output,1, 2) return outs def get_segmentation(self, t1_path,", "mid inference patch size \"\"\" t1 = normalize(t1, brain_mask) t1ce", "information: (https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>) ========================= quick: True (just evaluates on Dual path", "from time import gmtime, strftime from tqdm import tqdm import", "other with any random format step followed for in estimation", "= os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar') ckpt_ABL = os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar') #======================================================================================== # air", "= 3 self.ABLnet = FCDenseNet103(n_classes = self.ABLnclasses) ## intialize the", "Generates segmentation for the data in BraTs format if save", "from 
.models.modelTir3D import FCDenseNet57 self.T3Dnclasses = 5 self.Tir3Dnet = FCDenseNet57(self.T3Dnclasses)", "= transformed_array.to(self.device) logits = self.ABLnet(transformed_array).detach().cpu().numpy()# 3 x 240 x 240", "import gmtime, strftime from tqdm import tqdm import pdb import", "outs def get_segmentation(self, t1_path, t2_path, t1ce_path, flair_path, save_path = None):", "array) t2_v = t2 volume (numpy array) flair_v = flair", "np import nibabel as nib from torch.autograd import Variable from", "mask def get_localization(self, t1_v, t1c_v, t2_v, flair_v, brain_mask): \"\"\" ABLnetwork", "t2_v = t2 volume (numpy array) flair_v = flair volume", "(\"=================================== ABLNET2D Loaded =================================\") self.ABLnet.eval() self.ABLnet = self.ABLnet.to(device) #======================================================================================== #", "ABLnet N = patch size during inference \"\"\" t1 =", "model...................... from .models.model3DBNET import BrainNet_3D_Inception self.B3Dnclasses = 5 self.BNET3Dnet =", "t2_v, flair_v, brain_mask): \"\"\" ABLnetwork output, finds the brain, Whole", "= normalize(t1c_v, brain_mask) t2_v = normalize(t2_v, brain_mask) flair_v = normalize(flair_v,", "flair) final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits]) else: final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1,", "format if save True saves the prediction in the save", "final_pred = postprocessing_pydensecrf(final_prediction_logits) final_pred = combine_mask_prediction(mask, final_pred) final_pred = perform_postprocessing(final_pred)", "training details and network information: (https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>) ========================= quick: True (just", "2D tiramisu model (MNet) \"\"\" normalize = transforms.Normalize([0.485, 0.456, 0.406],", "i, :, :, :], (resize_to, 
resize_to, resize_to)) for i in", ":] = t1[x:x+N, y:y+N, z:z+N] high[0, 3, :, :, :]", "an ensumble over all four networks \"\"\" def __init__(self, quick", "= self.ABLnclasses) ## intialize the graph saved_parms=torch.load(ckpt_ABL, map_location=map_location) self.ABLnet.load_state_dict(saved_parms['state_dict']) ##", "flair, brain_mask, mask) final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair).transpose(0, 2, 1,", "= np.zeros((1, 4, N, N, N)) high[0, 0, :, :,", "inner class classification Dual Path way network 3. MNet2D 57", "map_location=map_location) self.ABLnet.load_state_dict(saved_parms['state_dict']) ## fill the model with trained params print", "for reducing false positives outside the brain Air Brain Lesson", "z_max = bbox(mask, pad = N) x_min, x_max, y_min, y_max,", "2) return outs def get_segmentation(self, t1_path, t2_path, t1ce_path, flair_path, save_path", "array[:,:,0] = flair_slice array[:,:,1] = t2_slice array[:,:,2] = t1ce_slice array[:,:,3]", "transformList.append(transforms.ToTensor()) transformList.append(normalize) transformSequence=transforms.Compose(transformList) generated_output = np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2])) for slices in tqdm(range(flair_volume.shape[2])):", "240 generated_output_logits[:,:,:, slices] = logits.transpose(0, 1, 3, 2) final_pred =", "== 1 transformed_array = transformed_array.to(self.device) logits = self.ABLnet(transformed_array).detach().cpu().numpy()# 3 x", "self.ABLnclasses = 3 self.ABLnet = FCDenseNet103(n_classes = self.ABLnclasses) ## intialize", "the brain, Whole tumor region t1_v = t1 volume (numpy", "2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt] low[0, 3,", "# to exclude batch_size final_prediction = np.zeros((self.T3Dnclasses, shape[0], shape[1], shape[2]))", "\"\"\" output of BNet3D prediction_size = mid inference patch size", "z+lr_pad) txf, txt = max(0, ll_pad-x), max(0, ll_pad-x) + vxt", 
"segmentation for a given sequence of patient data. to main", "= np.zeros((self.T3Dnclasses, shape[0], shape[1], shape[2])) x_min, x_max, y_min, y_max, z_min,", "save directory in the patients data path returns : segmentation", "Tir2D net....................... from .models.modelTir2D import FCDenseNet57 self.Mnclasses = 4 self.MNET2D", "torch.autograd import Variable from skimage.transform import resize from torchvision import", "= np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2])) for slices in tqdm(range(flair_v.shape[2])): flair_slice = np.transpose(flair_v[:,:,slices])", "t1 volume path (str) saves the mask in the same" ]
[ "for the loglikelihood for x in -1, 0, 1, 2:", "# Should be the same as model0 model1 = ConditionalPoisson(y,", "model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalLogit(y, x, groups=groups)", "1, 1, 2, 2, 2, 2] x1 = np.r_[0, 1,", "be the same as model0 model1 = ConditionalLogit(y, x, groups=groups)", "1, 1, 0, 0, 1, 0] x = x[:, None]", "np.r_[0.4170918], rtol=1e-5) def test_poisson_2d(): y = np.r_[3, 1, 4, 8,", "model._denom(0, x)) assert_allclose(grad, ngrad) # Check the gradient for the", "+ x4\" model3 = ConditionalPoisson.from_formula(fml, groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1,", "def test_poisson_1d(): y = np.r_[3, 1, 1, 4, 5, 2,", "Test with formula df = pd.DataFrame({\"y\": y, \"x1\": x[:, 0],", "x1 + x2 + x3 + x4\" model3 = ConditionalPoisson.from_formula(fml,", "= model.fit() # From Stata assert_allclose(result.params, np.r_[0.9272407], rtol=1e-5) assert_allclose(result.bse, np.r_[1.295155],", "data=df) result3 = model3.fit_regularized(L1_wt=1, alpha=0.05) assert_allclose(result2.params, result3.params) def test_lasso_poisson(): np.random.seed(342394)", "= model.score(params) assert_allclose(grad, score, rtol=1e-4) result = model.fit() # From", "0: model1 = ConditionalLogit(y, x, groups=g) else: model1 = ConditionalPoisson(y,", "np.empty((10, 2)) x[:, 0] = x1 x[:, 1] = x2", "group_effects = np.kron(group_effects, np.ones(n // 10)) x = np.random.normal(size=(n, 4))", "\"x1\": x1, \"x2\": x2, \"g\": g}) if j == 0:", "params) + group_effects mean = 1 / (1 + np.exp(-lin_pred))", "= x[:, None] model = ConditionalPoisson(y, x, groups=g) # Check", "np.r_[0, 0, 0, 1, 1, 1, 2, 2, 2, 2]", "lin_pred = np.dot(x, params) + group_effects mean = 1 /", "the loglikelihood for x in -1, 0, 1, 2: grad", "if j == 0: model2 = ConditionalLogit.from_formula( \"y ~ 0", "np.r_[0, 0, 1, 0, 0, 1, 0, 1, 1, 1]", "= 1 / (1 + np.exp(-lin_pred)) y = (np.random.uniform(size=n) <", "= 
ConditionalPoisson(y, x, groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.2) # Regression", "result.summary() def test_lasso_logistic(): np.random.seed(3423948) n = 200 groups = np.arange(10)", "+ x2 + x3 + x4\" model3 = ConditionalLogit.from_formula(fml, groups=\"groups\",", "Check the gradient for the denominator of the partial likelihood", "= model.fit() # From Stata assert_allclose(result.params, np.r_[0.6466272], rtol=1e-4) assert_allclose(result.bse, np.r_[0.4170918],", "1, 4, 5, 2, 0, 1, 6, 2] g =", "x3 + x4\" model3 = ConditionalPoisson.from_formula(fml, groups=\"groups\", data=df) result3 =", "statsmodels.discrete.conditional_models import ( ConditionalLogit, ConditionalPoisson) from statsmodels.tools.numdiff import approx_fprime from", "x)) assert_allclose(grad, ngrad, rtol=1e-5) # Check the gradient for the", "= np.random.normal(size=n) g = np.random.randint(0, 25, size=n) x = np.hstack((x1[:,", "def test_lasso_logistic(): np.random.seed(3423948) n = 200 groups = np.arange(10) groups", "# Regression test assert_allclose(result2.params, np.r_[0, 0, 0.91697508, 0], rtol=1e-4) #", "np.dot(x, params) + group_effects mean = 1 / (1 +", "x1 + x2\", groups=\"g\", data=df) result2 = model2.fit() assert_allclose(result1.params, result2.params,", "rtol=1e-3) assert_allclose(result.bse, np.r_[1.420784, 1.361738], rtol=1e-5) result.summary() def test_formula(): for j", "as model0 model1 = ConditionalPoisson(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0,", "assert_allclose(grad, ngrad, rtol=1e-5) # Check the gradient for the loglikelihood", "_, grad = model._denom_grad(0, params) ngrad = approx_fprime(params, lambda x:", "x in -1, 0, 1, 2: params = np.r_[x, -1.5*x]", "assert_allclose(result.bse, np.r_[.3874942, .1686712], rtol=1e-5) result.summary() def test_lasso_logistic(): np.random.seed(3423948) n =", "= x1 x[:, 1] = x2 model = ConditionalLogit(y, x,", "rtol=1e-4) result = model.fit() # From Stata assert_allclose(result.params, 
np.r_[0.6466272], rtol=1e-4)", "x[:, 0], \"x2\": x[:, 1], \"x3\": x[:, 2], \"x4\": x[:,", "x: model._denom(0, x)) assert_allclose(grad, ngrad) # Check the gradient for", "assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalPoisson(y, x, groups=groups) result2 =", "\"x1\": x[:, 0], \"x2\": x[:, 1], \"x3\": x[:, 2], \"x4\":", "lin_pred = np.dot(x, params) + group_effects mean = np.exp(lin_pred) y", "From Stata assert_allclose(result.params, np.r_[1.011074, 1.236758], rtol=1e-3) assert_allclose(result.bse, np.r_[1.420784, 1.361738], rtol=1e-5)", "0 + x1 + x2\", groups=\"g\", data=df) else: model2 =", "2, 5, 4, 7, 2, 6] g = np.r_[0, 0,", "+ x3 + x4\" model3 = ConditionalPoisson.from_formula(fml, groups=\"groups\", data=df) result3", "// 10)) group_effects = np.random.normal(size=10) group_effects = np.kron(group_effects, np.ones(n //", "8, 2, 5, 4, 7, 2, 6] g = np.r_[0,", "0, 1, 0] x2 = np.r_[0, 0, 1, 0, 0,", "2: grad = approx_fprime(np.r_[x, ], model.loglike) score = model.score(np.r_[x, ])", "result1 = model1.fit() df = pd.DataFrame({\"y\": y, \"x1\": x1, \"x2\":", "ngrad, rtol=1e-5) # Check the gradient for the loglikelihood for", "x4\" model3 = ConditionalLogit.from_formula(fml, groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1, alpha=0.05)", "0, 0, 1, 2, 3, 2, 0, 1] x =", "groups=g) # Check the gradient for the denominator of the", "2, 2] x1 = np.r_[0, 1, 0, 0, 1, 1,", "model2.fit_regularized(L1_wt=1, alpha=0.05) # Rxegression test assert_allclose(result2.params, np.r_[0, 0, 0.55235152, 0],", "# From Stata assert_allclose(result.params, np.r_[0.6466272], rtol=1e-4) assert_allclose(result.bse, np.r_[0.4170918], rtol=1e-5) def", "np.r_[0, 0, 1, 0] lin_pred = np.dot(x, params) + group_effects", "+ x2\", groups=\"g\", data=df) result2 = model2.fit() assert_allclose(result1.params, result2.params, rtol=1e-5)", "# Test with formula df = pd.DataFrame({\"y\": y, \"x1\": x[:,", "2, 3, 2, 0, 1] x = np.empty((10, 2)) x[:,", "= 
np.r_[0, 1, 0, 0, 1, 1, 0, 0, 1,", "x2 = np.random.normal(size=n) g = np.random.randint(0, 25, size=n) x =", "x4\" model3 = ConditionalPoisson.from_formula(fml, groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1, alpha=0.2)", "test_formula(): for j in 0, 1: np.random.seed(34234) n = 200", "0, 0, 1, 0] x2 = np.r_[0, 0, 1, 0,", "= ConditionalLogit(y, x, groups=g) # Check the gradient for the", "import assert_allclose import pandas as pd def test_logit_1d(): y =", "= np.r_[0, 0, 1, 0, 0, 1, 0, 1, 1,", "< mean).astype(np.int) model0 = ConditionalLogit(y, x, groups=groups) result0 = model0.fit()", "0, 1, 1, 1] g = np.r_[0, 0, 0, 1,", "= np.r_[3, 1, 1, 4, 5, 2, 0, 1, 6,", "= np.r_[-0.5*x, 0.5*x] grad = approx_fprime(params, model.loglike) score = model.score(params)", "np.r_[0, 0, 0.91697508, 0], rtol=1e-4) # Test with formula df", "group_effects mean = 1 / (1 + np.exp(-lin_pred)) y =", "0], \"x2\": x[:, 1], \"x3\": x[:, 2], \"x4\": x[:, 3],", "4, 7, 2, 6] g = np.r_[0, 0, 0, 1,", "1.361738], rtol=1e-5) result.summary() def test_formula(): for j in 0, 1:", "= 200 groups = np.arange(10) groups = np.kron(groups, np.ones(n //", "ConditionalPoisson(y, x, groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.2) # Regression test", "+ np.exp(-lin_pred)) y = (np.random.uniform(size=n) < mean).astype(np.int) model0 = ConditionalLogit(y,", "same as model0 model1 = ConditionalLogit(y, x, groups=groups) result1 =", "group_effects mean = np.exp(lin_pred) y = np.random.poisson(mean) model0 = ConditionalPoisson(y,", "np.r_[-.9478957, -.0134279], rtol=1e-3) assert_allclose(result.bse, np.r_[.3874942, .1686712], rtol=1e-5) result.summary() def test_lasso_logistic():", "np.r_[x, -1.5*x] _, grad = model._denom_grad(0, params) ngrad = approx_fprime(params,", "= np.random.normal(size=10) group_effects = np.kron(group_effects, np.ones(n // 10)) x =", "0] lin_pred = np.dot(x, params) + group_effects mean = np.exp(lin_pred)", "5, 2, 0, 1, 6, 2] g = np.r_[0, 0,", 
"result = model.fit() # From Stata assert_allclose(result.params, np.r_[0.9272407], rtol=1e-5) assert_allclose(result.bse,", "model3 = ConditionalLogit.from_formula(fml, groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1, alpha=0.05) assert_allclose(result2.params,", "1, 1, 4, 5, 2, 0, 1, 6, 2] g", "groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2 =", "0, 1, 0] x2 = np.r_[2, 1, 0, 0, 1,", "as model0 model1 = ConditionalLogit(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0,", "np.r_[0, 0, 0, 0, 1, 1, 1, 1, 1, 1]", "np.random.poisson(mean) model0 = ConditionalPoisson(y, x, groups=groups) result0 = model0.fit() #", "0, 0.55235152, 0], rtol=1e-4) # Test with formula df =", "np.kron(group_effects, np.ones(n // 10)) x = np.random.normal(size=(n, 4)) params =", "1, 2: grad = approx_fprime(np.r_[x, ], model.loglike) score = model.score(np.r_[x,", "model._denom(0, x)) assert_allclose(grad, ngrad, rtol=1e-5) # Check the gradient for", "2] x1 = np.r_[0, 1, 0, 0, 1, 1, 0,", "np.r_[0, 1, 0, 0, 1, 1, 0, 0, 1, 0]", "x = x[:, None] model = ConditionalLogit(y, x, groups=g) #", "= approx_fprime(np.r_[x, ], model.loglike) score = model.score(np.r_[x, ]) assert_allclose(grad, score,", "200 y = np.random.randint(0, 2, size=n) x1 = np.random.normal(size=n) x2", "ConditionalPoisson(y, x, groups=g) result1 = model1.fit() df = pd.DataFrame({\"y\": y,", "grad = approx_fprime(params, model.loglike) score = model.score(params) assert_allclose(grad, score, rtol=1e-4)", "for x in -1, 0, 1, 2: params = np.r_[x,", "approx_fprime(params, model.loglike) score = model.score(params) assert_allclose(grad, score, rtol=1e-4) result =", "-1, 0, 1, 2: params = np.r_[-0.5*x, 0.5*x] grad =", "of the partial likelihood for x in -1, 0, 1,", "test_lasso_poisson(): np.random.seed(342394) n = 200 groups = np.arange(10) groups =", "2, 0, 1, 6, 2] g = np.r_[0, 0, 0,", "the gradient for the denominator 
of the partial likelihood for", "= x2 model = ConditionalPoisson(y, x, groups=g) # Check the", "assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalLogit(y, x, groups=groups) result2 =", "groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.05) # Rxegression test assert_allclose(result2.params, np.r_[0,", "ConditionalLogit.from_formula(fml, groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1, alpha=0.05) assert_allclose(result2.params, result3.params) def", "import ( ConditionalLogit, ConditionalPoisson) from statsmodels.tools.numdiff import approx_fprime from numpy.testing", "model1 = ConditionalPoisson(y, x, groups=g) result1 = model1.fit() df =", "/ (1 + np.exp(-lin_pred)) y = (np.random.uniform(size=n) < mean).astype(np.int) model0", "0, 0, 1, 0] x = x[:, None] model =", "np.arange(10) groups = np.kron(groups, np.ones(n // 10)) group_effects = np.random.normal(size=10)", "x2 = np.r_[2, 1, 0, 0, 1, 2, 3, 2,", "np.r_[3, 1, 1, 4, 5, 2, 0, 1, 6, 2]", "= 200 y = np.random.randint(0, 2, size=n) x1 = np.random.normal(size=n)", "y = np.random.randint(0, 2, size=n) x1 = np.random.normal(size=n) x2 =", "result2.bse, rtol=1e-5) assert_allclose(result1.cov_params(), result2.cov_params(), rtol=1e-5) assert_allclose(result1.tvalues, result2.tvalues, rtol=1e-5) def test_poisson_1d():", "test assert_allclose(result2.params, np.r_[0, 0, 0.91697508, 0], rtol=1e-4) # Test with", "0] x = x[:, None] model = ConditionalLogit(y, x, groups=g)", "= np.random.normal(size=n) x2 = np.random.normal(size=n) g = np.random.randint(0, 25, size=n)", "x, groups=g) result1 = model1.fit() df = pd.DataFrame({\"y\": y, \"x1\":", "assert_allclose(result1.cov_params(), result2.cov_params(), rtol=1e-5) assert_allclose(result1.tvalues, result2.tvalues, rtol=1e-5) def test_poisson_1d(): y =", "+ group_effects mean = 1 / (1 + np.exp(-lin_pred)) y", "alpha=0.2) # Regression test assert_allclose(result2.params, np.r_[0, 0, 0.91697508, 0], rtol=1e-4)", 
"model0.fit() # Should be the same as model0 model1 =", "= model2.fit_regularized(L1_wt=1, alpha=0.05) # Rxegression test assert_allclose(result2.params, np.r_[0, 0, 0.55235152,", "rtol=1e-4) # Test with formula df = pd.DataFrame({\"y\": y, \"x1\":", "np.random.normal(size=n) x2 = np.random.normal(size=n) g = np.random.randint(0, 25, size=n) x", "params = np.r_[0, 0, 1, 0] lin_pred = np.dot(x, params)", "result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalLogit(y,", "assert_allclose(grad, ngrad) # Check the gradient for the loglikelihood for", "def test_poisson_2d(): y = np.r_[3, 1, 4, 8, 2, 5,", "0, 1] x = np.empty((10, 2)) x[:, 0] = x1", "grad = approx_fprime(np.r_[x, ], model.loglike) score = model.score(np.r_[x, ]) assert_allclose(grad,", "0, 1, 2, 3, 2, 0, 1] x = np.empty((10,", "x2, \"g\": g}) if j == 0: model2 = ConditionalLogit.from_formula(", "From Stata assert_allclose(result.params, np.r_[-.9478957, -.0134279], rtol=1e-3) assert_allclose(result.bse, np.r_[.3874942, .1686712], rtol=1e-5)", "pd.DataFrame({\"y\": y, \"x1\": x1, \"x2\": x2, \"g\": g}) if j", "y = (np.random.uniform(size=n) < mean).astype(np.int) model0 = ConditionalLogit(y, x, groups=groups)", "np.random.randint(0, 25, size=n) x = np.hstack((x1[:, None], x2[:, None])) if", "= np.r_[2, 1, 0, 0, 1, 2, 3, 2, 0,", "1], \"x3\": x[:, 2], \"x4\": x[:, 3], \"groups\": groups}) fml", "x1, \"x2\": x2, \"g\": g}) if j == 0: model2", "x1 = np.random.normal(size=n) x2 = np.random.normal(size=n) g = np.random.randint(0, 25,", "= ConditionalLogit.from_formula(fml, groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1, alpha=0.05) assert_allclose(result2.params, result3.params)", "model2 = ConditionalPoisson(y, x, groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.2) #", "x)) assert_allclose(grad, ngrad) # Check the gradient for the loglikelihood", "ConditionalLogit.from_formula( \"y ~ 0 + x1 + x2\", 
groups=\"g\", data=df)", "0] x2 = np.r_[2, 1, 0, 0, 1, 2, 3,", "1, 0, 1, 0, 1, 0, 1, 1, 1] g", "0, 1, 0, 1, 1, 1] x = np.empty((10, 2))", "the loglikelihood for x in -1, 0, 1, 2: params", "= np.kron(group_effects, np.ones(n // 10)) x = np.random.normal(size=(n, 4)) params", "result = model.fit() # From Stata assert_allclose(result.params, np.r_[0.6466272], rtol=1e-4) assert_allclose(result.bse,", "x2 + x3 + x4\" model3 = ConditionalPoisson.from_formula(fml, groups=\"groups\", data=df)", "== 0: model2 = ConditionalLogit.from_formula( \"y ~ 0 + x1", "rtol=1e-3) model2 = ConditionalLogit(y, x, groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.05)", "with formula df = pd.DataFrame({\"y\": y, \"x1\": x[:, 0], \"x2\":", "= ConditionalPoisson(y, x, groups=g) result1 = model1.fit() df = pd.DataFrame({\"y\":", "0, 0, 1, 0, 1, 1, 1] x = np.empty((10,", "np.r_[1.011074, 1.236758], rtol=1e-3) assert_allclose(result.bse, np.r_[1.420784, 1.361738], rtol=1e-5) result.summary() def test_formula():", "= \"y ~ 0 + x1 + x2 + x3", "rtol=1e-5) def test_poisson_2d(): y = np.r_[3, 1, 4, 8, 2,", "def test_logit_1d(): y = np.r_[0, 1, 0, 1, 0, 1,", "for x in -1, 0, 1, 2: params = np.r_[-0.5*x,", "x1 + x2 + x3 + x4\" model3 = ConditionalLogit.from_formula(fml,", "2, 2, 2, 2] x1 = np.r_[0, 1, 0, 0,", "= ConditionalLogit(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params,", "rtol=1e-5) assert_allclose(result.bse, np.r_[1.295155], rtol=1e-5) def test_logit_2d(): y = np.r_[0, 1,", "model1 = ConditionalPoisson(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params,", "rtol=1e-4) assert_allclose(result.bse, np.r_[0.4170918], rtol=1e-5) def test_poisson_2d(): y = np.r_[3, 1,", "y = np.r_[3, 1, 1, 4, 5, 2, 0, 1,", "result = model.fit() # From Stata assert_allclose(result.params, np.r_[1.011074, 1.236758], rtol=1e-3)", "test_poisson_2d(): y = np.r_[3, 1, 4, 8, 2, 5, 4,", 
"x1 x[:, 1] = x2 model = ConditionalLogit(y, x, groups=g)", "= approx_fprime(params, model.loglike) score = model.score(params) assert_allclose(grad, score, rtol=1e-4) result", "= model3.fit_regularized(L1_wt=1, alpha=0.05) assert_allclose(result2.params, result3.params) def test_lasso_poisson(): np.random.seed(342394) n =", "x1 x[:, 1] = x2 model = ConditionalPoisson(y, x, groups=g)", "+ x1 + x2\", groups=\"g\", data=df) result2 = model2.fit() assert_allclose(result1.params,", "# Check the gradient for the denominator of the partial", "= np.r_[0, 1, 0, 1, 0, 1, 0, 1, 1,", "np.r_[1.420784, 1.361738], rtol=1e-5) result.summary() def test_formula(): for j in 0,", "np.ones(n // 10)) x = np.random.normal(size=(n, 4)) params = np.r_[0,", "same as model0 model1 = ConditionalPoisson(y, x, groups=groups) result1 =", "ConditionalLogit(y, x, groups=g) else: model1 = ConditionalPoisson(y, x, groups=g) result1", "else: model1 = ConditionalPoisson(y, x, groups=g) result1 = model1.fit() df", "groups}) fml = \"y ~ 0 + x1 + x2", "np.r_[1.295155], rtol=1e-5) def test_logit_2d(): y = np.r_[0, 1, 0, 1,", "0, 0, 1, 1, 0, 0, 1, 0] x =", "0, 0, 0, 1, 1, 1, 1, 1, 1] x", "if j == 0: model1 = ConditionalLogit(y, x, groups=g) else:", "x[:, None] model = ConditionalLogit(y, x, groups=g) # Check the", "rtol=1e-5) result.summary() def test_formula(): for j in 0, 1: np.random.seed(34234)", "result0 = model0.fit() # Should be the same as model0", "x1 = np.r_[0, 1, 0, 0, 1, 1, 0, 0,", "pd.DataFrame({\"y\": y, \"x1\": x[:, 0], \"x2\": x[:, 1], \"x3\": x[:,", "y = np.r_[0, 1, 0, 1, 0, 1, 0, 1,", "in -1, 0, 1, 2: grad = approx_fprime(np.r_[x, ], model.loglike)", "for j in 0, 1: np.random.seed(34234) n = 200 y", "+ x2 + x3 + x4\" model3 = ConditionalPoisson.from_formula(fml, groups=\"groups\",", "denominator of the partial likelihood for x in -1, 0,", "ngrad = approx_fprime(params, lambda x: model._denom(0, x)) assert_allclose(grad, ngrad) #", "(1 + np.exp(-lin_pred)) y = (np.random.uniform(size=n) < 
mean).astype(np.int) model0 =", "groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.2) # Regression test assert_allclose(result2.params, np.r_[0,", "data=df) result2 = model2.fit() assert_allclose(result1.params, result2.params, rtol=1e-5) assert_allclose(result1.bse, result2.bse, rtol=1e-5)", "# Rxegression test assert_allclose(result2.params, np.r_[0, 0, 0.55235152, 0], rtol=1e-4) #", "x2 model = ConditionalPoisson(y, x, groups=g) # Check the gradient", "x2\", groups=\"g\", data=df) else: model2 = ConditionalPoisson.from_formula( \"y ~ 0", "1, 0, 0, 1, 0, 1, 1, 1] x =", "0, 1, 1, 0, 0, 1, 0] x = x[:,", "1, 0, 0, 1, 0] x2 = np.r_[0, 0, 1,", "assert_allclose(result2.params, result3.params) def test_lasso_poisson(): np.random.seed(342394) n = 200 groups =", "rtol=1e-5) def test_logit_2d(): y = np.r_[0, 1, 0, 1, 0,", "1, 4, 8, 2, 5, 4, 7, 2, 6] g", "Should be the same as model0 model1 = ConditionalPoisson(y, x,", "= np.r_[x, ] _, grad = model._denom_grad(0, params) ngrad =", "np.r_[0, 0, 0.55235152, 0], rtol=1e-4) # Test with formula df", "x[:, 1] = x2 model = ConditionalLogit(y, x, groups=g) #", "x in -1, 0, 1, 2: params = np.r_[x, ]", "np.hstack((x1[:, None], x2[:, None])) if j == 0: model1 =", "None])) if j == 0: model1 = ConditionalLogit(y, x, groups=g)", "= np.hstack((x1[:, None], x2[:, None])) if j == 0: model1", "result1.params, rtol=1e-3) model2 = ConditionalPoisson(y, x, groups=groups) result2 = model2.fit_regularized(L1_wt=1,", "7, 2, 6] g = np.r_[0, 0, 0, 1, 1,", "1] x = np.empty((10, 2)) x[:, 0] = x1 x[:,", "groups = np.kron(groups, np.ones(n // 10)) group_effects = np.random.normal(size=10) group_effects", "~ 0 + x1 + x2 + x3 + x4\"", "2, 2, 2, 2] x = np.r_[0, 1, 0, 0,", "0, 1, 1, 0, 0, 1, 0] x2 = np.r_[2,", "groups=groups) result0 = model0.fit() # Should be the same as", "ConditionalPoisson(y, x, groups=groups) result0 = model0.fit() # Should be the", "loglikelihood for x in -1, 0, 1, 2: grad =", "], model.loglike) score = 
model.score(np.r_[x, ]) assert_allclose(grad, score, rtol=1e-4) result", "assert_allclose import pandas as pd def test_logit_1d(): y = np.r_[0,", "1, 0, 0, 1, 0] x2 = np.r_[2, 1, 0,", "0, 1, 0, 0, 1, 0, 1, 1, 1] x", "0 + x1 + x2\", groups=\"g\", data=df) result2 = model2.fit()", "1, 0] x = x[:, None] model = ConditionalLogit(y, x,", "model1.fit() df = pd.DataFrame({\"y\": y, \"x1\": x1, \"x2\": x2, \"g\":", "5, 4, 7, 2, 6] g = np.r_[0, 0, 0,", "ConditionalLogit(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3)", "= pd.DataFrame({\"y\": y, \"x1\": x[:, 0], \"x2\": x[:, 1], \"x3\":", "0, 0.91697508, 0], rtol=1e-4) # Test with formula df =", "formula df = pd.DataFrame({\"y\": y, \"x1\": x[:, 0], \"x2\": x[:,", "1] g = np.r_[0, 0, 0, 1, 1, 1, 2,", "result3.params) def test_lasso_poisson(): np.random.seed(342394) n = 200 groups = np.arange(10)", "rtol=1e-4) result = model.fit() # From Stata assert_allclose(result.params, np.r_[1.011074, 1.236758],", "params = np.r_[-0.5*x, 0.5*x] grad = approx_fprime(params, model.loglike) score =", "result2.tvalues, rtol=1e-5) def test_poisson_1d(): y = np.r_[3, 1, 1, 4,", "np.random.randint(0, 2, size=n) x1 = np.random.normal(size=n) x2 = np.random.normal(size=n) g", "result2.cov_params(), rtol=1e-5) assert_allclose(result1.tvalues, result2.tvalues, rtol=1e-5) def test_poisson_1d(): y = np.r_[3,", "1, 1, 1, 1] x = np.r_[0, 1, 0, 0,", "= ConditionalLogit.from_formula( \"y ~ 0 + x1 + x2\", groups=\"g\",", "alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalPoisson(y, x, groups=groups) result2", "= model.fit() # From Stata assert_allclose(result.params, np.r_[-.9478957, -.0134279], rtol=1e-3) assert_allclose(result.bse,", "1] = x2 model = ConditionalPoisson(y, x, groups=g) # Check", "rtol=1e-3) assert_allclose(result.bse, np.r_[.3874942, .1686712], rtol=1e-5) result.summary() def test_lasso_logistic(): 
np.random.seed(3423948) n", "lambda x: model._denom(0, x)) assert_allclose(grad, ngrad, rtol=1e-5) # Check the", "score = model.score(params) assert_allclose(grad, score, rtol=1e-4) result = model.fit() #", "x2 = np.r_[0, 0, 1, 0, 0, 1, 0, 1,", "\"x4\": x[:, 3], \"groups\": groups}) fml = \"y ~ 0", "Should be the same as model0 model1 = ConditionalLogit(y, x,", "test assert_allclose(result2.params, np.r_[0, 0, 0.55235152, 0], rtol=1e-4) # Test with", "rtol=1e-5) assert_allclose(result1.bse, result2.bse, rtol=1e-5) assert_allclose(result1.cov_params(), result2.cov_params(), rtol=1e-5) assert_allclose(result1.tvalues, result2.tvalues, rtol=1e-5)", "ConditionalLogit(y, x, groups=groups) result0 = model0.fit() # Should be the", "x, groups=g) # Check the gradient for the denominator of", "x, groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2", "alpha=0.05) assert_allclose(result2.params, result3.params) def test_lasso_poisson(): np.random.seed(342394) n = 200 groups", "= model.fit() # From Stata assert_allclose(result.params, np.r_[1.011074, 1.236758], rtol=1e-3) assert_allclose(result.bse,", "model.fit() # From Stata assert_allclose(result.params, np.r_[0.9272407], rtol=1e-5) assert_allclose(result.bse, np.r_[1.295155], rtol=1e-5)", "0], rtol=1e-4) # Test with formula df = pd.DataFrame({\"y\": y,", "\"y ~ 0 + x1 + x2 + x3 +", "model1 = ConditionalLogit(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params,", "model2 = ConditionalLogit.from_formula( \"y ~ 0 + x1 + x2\",", "1, 0, 0, 1, 1, 0, 0, 1, 0] x2", "0, 1, 6, 2] g = np.r_[0, 0, 0, 0,", "+ x1 + x2 + x3 + x4\" model3 =", "= ConditionalPoisson(y, x, groups=groups) result0 = model0.fit() # Should be", "np.dot(x, params) + group_effects mean = np.exp(lin_pred) y = np.random.poisson(mean)", "rtol=1e-5) # Check the gradient for the loglikelihood for x", "= model2.fit_regularized(L1_wt=1, 
alpha=0.2) # Regression test assert_allclose(result2.params, np.r_[0, 0, 0.91697508,", "alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalLogit(y, x, groups=groups) result2", "-1, 0, 1, 2: grad = approx_fprime(np.r_[x, ], model.loglike) score", "np.exp(lin_pred) y = np.random.poisson(mean) model0 = ConditionalPoisson(y, x, groups=groups) result0", "ngrad) # Check the gradient for the loglikelihood for x", "result2 = model2.fit_regularized(L1_wt=1, alpha=0.2) # Regression test assert_allclose(result2.params, np.r_[0, 0,", "ConditionalLogit, ConditionalPoisson) from statsmodels.tools.numdiff import approx_fprime from numpy.testing import assert_allclose", "= np.r_[x, -1.5*x] _, grad = model._denom_grad(0, params) ngrad =", "= np.arange(10) groups = np.kron(groups, np.ones(n // 10)) group_effects =", "result2 = model2.fit_regularized(L1_wt=1, alpha=0.05) # Rxegression test assert_allclose(result2.params, np.r_[0, 0,", "statsmodels.tools.numdiff import approx_fprime from numpy.testing import assert_allclose import pandas as", "+ x1 + x2\", groups=\"g\", data=df) else: model2 = ConditionalPoisson.from_formula(", "0, 1, 1, 1, 1, 1, 1] x = np.r_[0,", "y = np.random.poisson(mean) model0 = ConditionalPoisson(y, x, groups=groups) result0 =", "rtol=1e-5) def test_poisson_1d(): y = np.r_[3, 1, 1, 4, 5,", "mean = 1 / (1 + np.exp(-lin_pred)) y = (np.random.uniform(size=n)", "groups=g) else: model1 = ConditionalPoisson(y, x, groups=g) result1 = model1.fit()", "= ConditionalLogit(y, x, groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.05) # Rxegression", "params) ngrad = approx_fprime(params, lambda x: model._denom(0, x)) assert_allclose(grad, ngrad)", "= np.exp(lin_pred) y = np.random.poisson(mean) model0 = ConditionalPoisson(y, x, groups=groups)", "model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalPoisson(y, x, groups=groups)", "= ConditionalLogit(y, x, 
groups=g) else: model1 = ConditionalPoisson(y, x, groups=g)", "g = np.r_[0, 0, 0, 1, 1, 1, 2, 2,", "n = 200 y = np.random.randint(0, 2, size=n) x1 =", "1, 0, 0, 1, 2, 3, 2, 0, 1] x", "lambda x: model._denom(0, x)) assert_allclose(grad, ngrad) # Check the gradient", "= x2 model = ConditionalLogit(y, x, groups=g) # Check the", "= ConditionalPoisson(y, x, groups=g) # Check the gradient for the", "np.exp(-lin_pred)) y = (np.random.uniform(size=n) < mean).astype(np.int) model0 = ConditionalLogit(y, x,", "rtol=1e-5) result.summary() def test_lasso_logistic(): np.random.seed(3423948) n = 200 groups =", "1, 1] x = np.r_[0, 1, 0, 0, 1, 1,", "\"x2\": x[:, 1], \"x3\": x[:, 2], \"x4\": x[:, 3], \"groups\":", "1, 2: params = np.r_[x, -1.5*x] _, grad = model._denom_grad(0,", "4)) params = np.r_[0, 0, 1, 0] lin_pred = np.dot(x,", "groups=g) result1 = model1.fit() df = pd.DataFrame({\"y\": y, \"x1\": x1,", "= model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalPoisson(y, x,", "def test_logit_2d(): y = np.r_[0, 1, 0, 1, 0, 1,", "np.random.normal(size=10) group_effects = np.kron(group_effects, np.ones(n // 10)) x = np.random.normal(size=(n,", "rtol=1e-5) assert_allclose(result1.tvalues, result2.tvalues, rtol=1e-5) def test_poisson_1d(): y = np.r_[3, 1,", "np.r_[0.6466272], rtol=1e-4) assert_allclose(result.bse, np.r_[0.4170918], rtol=1e-5) def test_poisson_2d(): y = np.r_[3,", "model0 = ConditionalLogit(y, x, groups=groups) result0 = model0.fit() # Should", "1, 0] x2 = np.r_[2, 1, 0, 0, 1, 2,", "0, 1, 1, 1, 2, 2, 2, 2] x1 =", "np.r_[3, 1, 4, 8, 2, 5, 4, 7, 2, 6]", "2: params = np.r_[-0.5*x, 0.5*x] grad = approx_fprime(params, model.loglike) score", "model2 = ConditionalPoisson.from_formula( \"y ~ 0 + x1 + x2\",", "1: np.random.seed(34234) n = 200 y = np.random.randint(0, 2, size=n)", "0, 1, 0] x = x[:, None] model = ConditionalLogit(y,", "g = np.random.randint(0, 25, size=n) x = np.hstack((x1[:, None], x2[:,", "the 
partial likelihood for x in -1, 0, 1, 2:", "approx_fprime(params, lambda x: model._denom(0, x)) assert_allclose(grad, ngrad, rtol=1e-5) # Check", "assert_allclose(result.bse, np.r_[1.420784, 1.361738], rtol=1e-5) result.summary() def test_formula(): for j in", "\"x2\": x2, \"g\": g}) if j == 0: model2 =", "~ 0 + x1 + x2\", groups=\"g\", data=df) result2 =", "None], x2[:, None])) if j == 0: model1 = ConditionalLogit(y,", "1, 1, 1, 1, 1, 1] x = np.r_[0, 1,", "Rxegression test assert_allclose(result2.params, np.r_[0, 0, 0.55235152, 0], rtol=1e-4) # Test", "alpha=0.05) # Rxegression test assert_allclose(result2.params, np.r_[0, 0, 0.55235152, 0], rtol=1e-4)", "25, size=n) x = np.hstack((x1[:, None], x2[:, None])) if j", "mean).astype(np.int) model0 = ConditionalLogit(y, x, groups=groups) result0 = model0.fit() #", "2: params = np.r_[x, -1.5*x] _, grad = model._denom_grad(0, params)", "0.5*x] grad = approx_fprime(params, model.loglike) score = model.score(params) assert_allclose(grad, score,", "= np.random.poisson(mean) model0 = ConditionalPoisson(y, x, groups=groups) result0 = model0.fit()", "params = np.r_[x, -1.5*x] _, grad = model._denom_grad(0, params) ngrad", "np.r_[x, ] _, grad = model._denom_grad(0, params) ngrad = approx_fprime(params,", "model.fit() # From Stata assert_allclose(result.params, np.r_[-.9478957, -.0134279], rtol=1e-3) assert_allclose(result.bse, np.r_[.3874942,", "group_effects = np.random.normal(size=10) group_effects = np.kron(group_effects, np.ones(n // 10)) x", "import pandas as pd def test_logit_1d(): y = np.r_[0, 1,", "2], \"x4\": x[:, 3], \"groups\": groups}) fml = \"y ~", "np.r_[.3874942, .1686712], rtol=1e-5) result.summary() def test_lasso_logistic(): np.random.seed(3423948) n = 200", "1, 0, 0, 1, 1, 0, 0, 1, 0] x", "0] lin_pred = np.dot(x, params) + group_effects mean = 1", "ConditionalPoisson(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3)", 
"assert_allclose(result1.bse, result2.bse, rtol=1e-5) assert_allclose(result1.cov_params(), result2.cov_params(), rtol=1e-5) assert_allclose(result1.tvalues, result2.tvalues, rtol=1e-5) def", "= approx_fprime(params, lambda x: model._denom(0, x)) assert_allclose(grad, ngrad, rtol=1e-5) #", "# Should be the same as model0 model1 = ConditionalLogit(y,", "np.r_[0.9272407], rtol=1e-5) assert_allclose(result.bse, np.r_[1.295155], rtol=1e-5) def test_logit_2d(): y = np.r_[0,", "+ group_effects mean = np.exp(lin_pred) y = np.random.poisson(mean) model0 =", "x[:, 1], \"x3\": x[:, 2], \"x4\": x[:, 3], \"groups\": groups})", "np from statsmodels.discrete.conditional_models import ( ConditionalLogit, ConditionalPoisson) from statsmodels.tools.numdiff import", "test_lasso_logistic(): np.random.seed(3423948) n = 200 groups = np.arange(10) groups =", "for the denominator of the partial likelihood for x in", "1, 1, 0, 0, 1, 0] x2 = np.r_[2, 1,", "= np.random.randint(0, 2, size=n) x1 = np.random.normal(size=n) x2 = np.random.normal(size=n)", "200 groups = np.arange(10) groups = np.kron(groups, np.ones(n // 10))", "= ConditionalPoisson(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params,", "= np.r_[0, 0, 0, 1, 1, 1, 2, 2, 2,", "1, 1, 1, 2, 2, 2, 2] x1 = np.r_[0,", "the gradient for the loglikelihood for x in -1, 0,", "1, 1, 2, 2, 2, 2] x = np.r_[0, 1,", "0, 1, 2: params = np.r_[x, ] _, grad =", "model.score(np.r_[x, ]) assert_allclose(grad, score, rtol=1e-4) result = model.fit() # From", "model2.fit_regularized(L1_wt=1, alpha=0.2) # Regression test assert_allclose(result2.params, np.r_[0, 0, 0.91697508, 0],", "0, 1, 2: params = np.r_[-0.5*x, 0.5*x] grad = approx_fprime(params,", "params = np.r_[x, ] _, grad = model._denom_grad(0, params) ngrad", "1, 2: params = np.r_[x, ] _, grad = model._denom_grad(0,", "model3.fit_regularized(L1_wt=1, alpha=0.05) assert_allclose(result2.params, result3.params) def 
test_lasso_poisson(): np.random.seed(342394) n = 200", "x, groups=g) else: model1 = ConditionalPoisson(y, x, groups=g) result1 =", "model2 = ConditionalLogit(y, x, groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.05) #", "0, 0, 1, 0] x2 = np.r_[2, 1, 0, 0,", "= np.dot(x, params) + group_effects mean = 1 / (1", "# From Stata assert_allclose(result.params, np.r_[-.9478957, -.0134279], rtol=1e-3) assert_allclose(result.bse, np.r_[.3874942, .1686712],", "model = ConditionalLogit(y, x, groups=g) # Check the gradient for", "rtol=1e-5) assert_allclose(result1.cov_params(), result2.cov_params(), rtol=1e-5) assert_allclose(result1.tvalues, result2.tvalues, rtol=1e-5) def test_poisson_1d(): y", "data=df) else: model2 = ConditionalPoisson.from_formula( \"y ~ 0 + x1", "x[:, 0] = x1 x[:, 1] = x2 model =", "2] g = np.r_[0, 0, 0, 0, 1, 1, 1,", "= ConditionalPoisson.from_formula(fml, groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1, alpha=0.2) assert_allclose(result2.params, result3.params)", "x, groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.05) # Rxegression test assert_allclose(result2.params,", "from statsmodels.discrete.conditional_models import ( ConditionalLogit, ConditionalPoisson) from statsmodels.tools.numdiff import approx_fprime", "1, 0] x = x[:, None] model = ConditionalPoisson(y, x,", "ConditionalLogit(y, x, groups=g) # Check the gradient for the denominator", "= approx_fprime(params, lambda x: model._denom(0, x)) assert_allclose(grad, ngrad) # Check", "test_logit_2d(): y = np.r_[0, 1, 0, 1, 0, 1, 0,", "np.r_[-0.5*x, 0.5*x] grad = approx_fprime(params, model.loglike) score = model.score(params) assert_allclose(grad,", "model.fit() # From Stata assert_allclose(result.params, np.r_[0.6466272], rtol=1e-4) assert_allclose(result.bse, np.r_[0.4170918], rtol=1e-5)", "= model.score(np.r_[x, ]) assert_allclose(grad, score, rtol=1e-4) result = model.fit() #", "6] g = np.r_[0, 0, 0, 1, 1, 1, 2,", "1, 1, 1] g = np.r_[0, 0, 
0, 1, 1,", "x = np.empty((10, 2)) x[:, 0] = x1 x[:, 1]", "-.0134279], rtol=1e-3) assert_allclose(result.bse, np.r_[.3874942, .1686712], rtol=1e-5) result.summary() def test_lasso_logistic(): np.random.seed(3423948)", "= model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalLogit(y, x,", "model0 model1 = ConditionalLogit(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0)", ".1686712], rtol=1e-5) result.summary() def test_lasso_logistic(): np.random.seed(3423948) n = 200 groups", "4, 8, 2, 5, 4, 7, 2, 6] g =", "= np.random.randint(0, 25, size=n) x = np.hstack((x1[:, None], x2[:, None]))", "approx_fprime from numpy.testing import assert_allclose import pandas as pd def", "np.r_[2, 1, 0, 0, 1, 2, 3, 2, 0, 1]", "pd def test_logit_1d(): y = np.r_[0, 1, 0, 1, 0,", "mean = np.exp(lin_pred) y = np.random.poisson(mean) model0 = ConditionalPoisson(y, x,", "result1 = model1.fit_regularized(L1_wt=0, alpha=0) assert_allclose(result0.params, result1.params, rtol=1e-3) model2 = ConditionalPoisson(y,", "numpy.testing import assert_allclose import pandas as pd def test_logit_1d(): y", "2, size=n) x1 = np.random.normal(size=n) x2 = np.random.normal(size=n) g =", "1, 0, 1, 1, 1] x = np.empty((10, 2)) x[:,", "ngrad = approx_fprime(params, lambda x: model._denom(0, x)) assert_allclose(grad, ngrad, rtol=1e-5)", "numpy as np from statsmodels.discrete.conditional_models import ( ConditionalLogit, ConditionalPoisson) from", "assert_allclose(result.params, np.r_[0.9272407], rtol=1e-5) assert_allclose(result.bse, np.r_[1.295155], rtol=1e-5) def test_logit_2d(): y =", "g}) if j == 0: model2 = ConditionalLogit.from_formula( \"y ~", "score, rtol=1e-4) result = model.fit() # From Stata assert_allclose(result.params, np.r_[-.9478957,", "0] x = x[:, None] model = ConditionalPoisson(y, x, groups=g)", "From Stata assert_allclose(result.params, np.r_[0.9272407], rtol=1e-5) assert_allclose(result.bse, 
np.r_[1.295155], rtol=1e-5) def test_logit_2d():", "np.r_[0, 1, 0, 1, 0, 1, 0, 1, 1, 1]", "0, 1: np.random.seed(34234) n = 200 y = np.random.randint(0, 2,", "From Stata assert_allclose(result.params, np.r_[0.6466272], rtol=1e-4) assert_allclose(result.bse, np.r_[0.4170918], rtol=1e-5) def test_poisson_2d():", "4, 5, 2, 0, 1, 6, 2] g = np.r_[0,", "1, 1] g = np.r_[0, 0, 0, 1, 1, 1,", "1, 1, 1, 1, 1] x = np.r_[0, 1, 0,", "1, 0, 1, 0, 1, 1, 1] g = np.r_[0,", "x, groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.2) # Regression test assert_allclose(result2.params,", "rtol=1e-4) result = model.fit() # From Stata assert_allclose(result.params, np.r_[-.9478957, -.0134279],", "x2 + x3 + x4\" model3 = ConditionalLogit.from_formula(fml, groups=\"groups\", data=df)", "( ConditionalLogit, ConditionalPoisson) from statsmodels.tools.numdiff import approx_fprime from numpy.testing import", "Stata assert_allclose(result.params, np.r_[0.9272407], rtol=1e-5) assert_allclose(result.bse, np.r_[1.295155], rtol=1e-5) def test_logit_2d(): y", "1, 0] lin_pred = np.dot(x, params) + group_effects mean =", "Check the gradient for the loglikelihood for x in -1,", "test_poisson_1d(): y = np.r_[3, 1, 1, 4, 5, 2, 0,", "1, 1, 1] x = np.r_[0, 1, 0, 0, 1,", "= x1 x[:, 1] = x2 model = ConditionalPoisson(y, x,", "x = x[:, None] model = ConditionalPoisson(y, x, groups=g) #", "0: model2 = ConditionalLogit.from_formula( \"y ~ 0 + x1 +", "0] = x1 x[:, 1] = x2 model = ConditionalLogit(y,", "~ 0 + x1 + x2\", groups=\"g\", data=df) else: model2", "j == 0: model1 = ConditionalLogit(y, x, groups=g) else: model1", "groups=\"g\", data=df) result2 = model2.fit() assert_allclose(result1.params, result2.params, rtol=1e-5) assert_allclose(result1.bse, result2.bse,", "import numpy as np from statsmodels.discrete.conditional_models import ( ConditionalLogit, ConditionalPoisson)", "rtol=1e-4) result = model.fit() # From Stata assert_allclose(result.params, np.r_[0.9272407], rtol=1e-5)", 
"np.random.seed(34234) n = 200 y = np.random.randint(0, 2, size=n) x1", "df = pd.DataFrame({\"y\": y, \"x1\": x[:, 0], \"x2\": x[:, 1],", "0, 0, 1, 1, 1, 2, 2, 2, 2] x", "as pd def test_logit_1d(): y = np.r_[0, 1, 0, 1,", "x = np.hstack((x1[:, None], x2[:, None])) if j == 0:", "loglikelihood for x in -1, 0, 1, 2: params =", "j == 0: model2 = ConditionalLogit.from_formula( \"y ~ 0 +", "3, 2, 0, 1] x = np.empty((10, 2)) x[:, 0]", "y, \"x1\": x[:, 0], \"x2\": x[:, 1], \"x3\": x[:, 2],", "model0 model1 = ConditionalPoisson(y, x, groups=groups) result1 = model1.fit_regularized(L1_wt=0, alpha=0)", "0.55235152, 0], rtol=1e-4) # Test with formula df = pd.DataFrame({\"y\":", "score = model.score(np.r_[x, ]) assert_allclose(grad, score, rtol=1e-4) result = model.fit()", "model1 = ConditionalLogit(y, x, groups=g) else: model1 = ConditionalPoisson(y, x,", "0, 1, 1, 1, 2, 2, 2, 2] x =", "0, 0, 1, 1, 1, 1, 1, 1] x =", "in -1, 0, 1, 2: params = np.r_[x, ] _,", "gradient for the denominator of the partial likelihood for x", "1, 0, 0, 1, 0] x = x[:, None] model", "in -1, 0, 1, 2: params = np.r_[x, -1.5*x] _,", "grad = model._denom_grad(0, params) ngrad = approx_fprime(params, lambda x: model._denom(0,", "= model1.fit() df = pd.DataFrame({\"y\": y, \"x1\": x1, \"x2\": x2,", "from statsmodels.tools.numdiff import approx_fprime from numpy.testing import assert_allclose import pandas", "gradient for the loglikelihood for x in -1, 0, 1,", "j in 0, 1: np.random.seed(34234) n = 200 y =", "np.kron(groups, np.ones(n // 10)) group_effects = np.random.normal(size=10) group_effects = np.kron(group_effects,", "np.ones(n // 10)) group_effects = np.random.normal(size=10) group_effects = np.kron(group_effects, np.ones(n", "be the same as model0 model1 = ConditionalPoisson(y, x, groups=groups)", "None] model = ConditionalPoisson(y, x, groups=g) # Check the gradient", "size=n) x = np.hstack((x1[:, None], x2[:, None])) if j ==", "0, 1, 0, 1, 1, 1] g = np.r_[0, 0,", "ConditionalLogit(y, x, 
groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.05) # Rxegression test", "groups=\"g\", data=df) else: model2 = ConditionalPoisson.from_formula( \"y ~ 0 +", "def test_formula(): for j in 0, 1: np.random.seed(34234) n =", "0 + x1 + x2 + x3 + x4\" model3", "-1.5*x] _, grad = model._denom_grad(0, params) ngrad = approx_fprime(params, lambda", "x in -1, 0, 1, 2: params = np.r_[-0.5*x, 0.5*x]", "assert_allclose(result.params, np.r_[-.9478957, -.0134279], rtol=1e-3) assert_allclose(result.bse, np.r_[.3874942, .1686712], rtol=1e-5) result.summary() def", "1, 6, 2] g = np.r_[0, 0, 0, 0, 1,", "x[:, 3], \"groups\": groups}) fml = \"y ~ 0 +", "# From Stata assert_allclose(result.params, np.r_[1.011074, 1.236758], rtol=1e-3) assert_allclose(result.bse, np.r_[1.420784, 1.361738],", "x, groups=groups) result0 = model0.fit() # Should be the same", "0, 1, 0] lin_pred = np.dot(x, params) + group_effects mean", "model2.fit() assert_allclose(result1.params, result2.params, rtol=1e-5) assert_allclose(result1.bse, result2.bse, rtol=1e-5) assert_allclose(result1.cov_params(), result2.cov_params(), rtol=1e-5)", "= x[:, None] model = ConditionalLogit(y, x, groups=g) # Check", "1, 2, 3, 2, 0, 1] x = np.empty((10, 2))", "2, 6] g = np.r_[0, 0, 0, 1, 1, 1,", "= (np.random.uniform(size=n) < mean).astype(np.int) model0 = ConditionalLogit(y, x, groups=groups) result0", "y, \"x1\": x1, \"x2\": x2, \"g\": g}) if j ==", "assert_allclose(result1.params, result2.params, rtol=1e-5) assert_allclose(result1.bse, result2.bse, rtol=1e-5) assert_allclose(result1.cov_params(), result2.cov_params(), rtol=1e-5) assert_allclose(result1.tvalues,", "result3 = model3.fit_regularized(L1_wt=1, alpha=0.05) assert_allclose(result2.params, result3.params) def test_lasso_poisson(): np.random.seed(342394) n", "= np.dot(x, params) + group_effects mean = np.exp(lin_pred) y =", "1, 0] x2 = np.r_[0, 0, 1, 0, 0, 1,", "g = np.r_[0, 0, 0, 0, 1, 1, 1, 1,", "result2.params, rtol=1e-5) 
assert_allclose(result1.bse, result2.bse, rtol=1e-5) assert_allclose(result1.cov_params(), result2.cov_params(), rtol=1e-5) assert_allclose(result1.tvalues, result2.tvalues,", "2, 0, 1] x = np.empty((10, 2)) x[:, 0] =", "+ x2\", groups=\"g\", data=df) else: model2 = ConditionalPoisson.from_formula( \"y ~", "= model0.fit() # Should be the same as model0 model1", "== 0: model1 = ConditionalLogit(y, x, groups=g) else: model1 =", "x2[:, None])) if j == 0: model1 = ConditionalLogit(y, x,", "assert_allclose(result2.params, np.r_[0, 0, 0.91697508, 0], rtol=1e-4) # Test with formula", "x1 + x2\", groups=\"g\", data=df) else: model2 = ConditionalPoisson.from_formula( \"y", "ConditionalPoisson) from statsmodels.tools.numdiff import approx_fprime from numpy.testing import assert_allclose import", "np.random.normal(size=(n, 4)) params = np.r_[0, 0, 1, 0] lin_pred =", "result.summary() def test_formula(): for j in 0, 1: np.random.seed(34234) n", "1.236758], rtol=1e-3) assert_allclose(result.bse, np.r_[1.420784, 1.361738], rtol=1e-5) result.summary() def test_formula(): for", "model._denom_grad(0, params) ngrad = approx_fprime(params, lambda x: model._denom(0, x)) assert_allclose(grad,", "x[:, 2], \"x4\": x[:, 3], \"groups\": groups}) fml = \"y", "] _, grad = model._denom_grad(0, params) ngrad = approx_fprime(params, lambda", "assert_allclose(grad, score, rtol=1e-4) result = model.fit() # From Stata assert_allclose(result.params,", "size=n) x1 = np.random.normal(size=n) x2 = np.random.normal(size=n) g = np.random.randint(0,", "x, groups=g) # Check the gradient for the loglikelihood for", "model.loglike) score = model.score(np.r_[x, ]) assert_allclose(grad, score, rtol=1e-4) result =", "-1, 0, 1, 2: params = np.r_[x, ] _, grad", "= ConditionalPoisson.from_formula( \"y ~ 0 + x1 + x2\", groups=\"g\",", "model.fit() # From Stata assert_allclose(result.params, np.r_[1.011074, 1.236758], rtol=1e-3) assert_allclose(result.bse, np.r_[1.420784,", "the denominator of the partial 
likelihood for x in -1,", "None] model = ConditionalLogit(y, x, groups=g) # Check the gradient", "groups = np.arange(10) groups = np.kron(groups, np.ones(n // 10)) group_effects", "assert_allclose(result.params, np.r_[0.6466272], rtol=1e-4) assert_allclose(result.bse, np.r_[0.4170918], rtol=1e-5) def test_poisson_2d(): y =", "score, rtol=1e-4) result = model.fit() # From Stata assert_allclose(result.params, np.r_[0.6466272],", "from numpy.testing import assert_allclose import pandas as pd def test_logit_1d():", "np.random.normal(size=n) g = np.random.randint(0, 25, size=n) x = np.hstack((x1[:, None],", "6, 2] g = np.r_[0, 0, 0, 0, 1, 1,", "= ConditionalLogit(y, x, groups=groups) result0 = model0.fit() # Should be", "= pd.DataFrame({\"y\": y, \"x1\": x1, \"x2\": x2, \"g\": g}) if", "1, 1, 1, 2, 2, 2, 2] x = np.r_[0,", "Stata assert_allclose(result.params, np.r_[0.6466272], rtol=1e-4) assert_allclose(result.bse, np.r_[0.4170918], rtol=1e-5) def test_poisson_2d(): y", "params) ngrad = approx_fprime(params, lambda x: model._denom(0, x)) assert_allclose(grad, ngrad,", "2] x = np.r_[0, 1, 0, 0, 1, 1, 0,", "x[:, 1] = x2 model = ConditionalPoisson(y, x, groups=g) #", "the same as model0 model1 = ConditionalPoisson(y, x, groups=groups) result1", "= np.empty((10, 2)) x[:, 0] = x1 x[:, 1] =", "\"y ~ 0 + x1 + x2\", groups=\"g\", data=df) result2", "groups=g) # Check the gradient for the loglikelihood for x", "assert_allclose(result.bse, np.r_[1.295155], rtol=1e-5) def test_logit_2d(): y = np.r_[0, 1, 0,", "= np.r_[0, 0, 1, 0] lin_pred = np.dot(x, params) +", "the same as model0 model1 = ConditionalLogit(y, x, groups=groups) result1", "rtol=1e-3) model2 = ConditionalPoisson(y, x, groups=groups) result2 = model2.fit_regularized(L1_wt=1, alpha=0.2)", "// 10)) x = np.random.normal(size=(n, 4)) params = np.r_[0, 0,", "approx_fprime(params, lambda x: model._denom(0, x)) assert_allclose(grad, ngrad) # Check the", "0] x2 = np.r_[0, 0, 1, 0, 0, 1, 0,", "x in -1, 0, 1, 2: grad = 
approx_fprime(np.r_[x, ],", "10)) group_effects = np.random.normal(size=10) group_effects = np.kron(group_effects, np.ones(n // 10))", "result1.params, rtol=1e-3) model2 = ConditionalLogit(y, x, groups=groups) result2 = model2.fit_regularized(L1_wt=1,", "\"y ~ 0 + x1 + x2\", groups=\"g\", data=df) else:", "score, rtol=1e-4) result = model.fit() # From Stata assert_allclose(result.params, np.r_[0.9272407],", "]) assert_allclose(grad, score, rtol=1e-4) result = model.fit() # From Stata", "= np.r_[3, 1, 4, 8, 2, 5, 4, 7, 2,", "1, 2, 2, 2, 2] x1 = np.r_[0, 1, 0,", "0, 1, 2: grad = approx_fprime(np.r_[x, ], model.loglike) score =", "as np from statsmodels.discrete.conditional_models import ( ConditionalLogit, ConditionalPoisson) from statsmodels.tools.numdiff", "0, 1, 2: params = np.r_[x, -1.5*x] _, grad =", "Stata assert_allclose(result.params, np.r_[1.011074, 1.236758], rtol=1e-3) assert_allclose(result.bse, np.r_[1.420784, 1.361738], rtol=1e-5) result.summary()", "assert_allclose(result1.tvalues, result2.tvalues, rtol=1e-5) def test_poisson_1d(): y = np.r_[3, 1, 1,", "ConditionalPoisson.from_formula( \"y ~ 0 + x1 + x2\", groups=\"g\", data=df)", "1, 1] x = np.empty((10, 2)) x[:, 0] = x1", "df = pd.DataFrame({\"y\": y, \"x1\": x1, \"x2\": x2, \"g\": g})", "= np.r_[0, 0, 0, 0, 1, 1, 1, 1, 1,", "1 / (1 + np.exp(-lin_pred)) y = (np.random.uniform(size=n) < mean).astype(np.int)", "= np.random.normal(size=(n, 4)) params = np.r_[0, 0, 1, 0] lin_pred", "0, 0, 1, 1, 0, 0, 1, 0] x2 =", "n = 200 groups = np.arange(10) groups = np.kron(groups, np.ones(n", "3], \"groups\": groups}) fml = \"y ~ 0 + x1", "groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1, alpha=0.05) assert_allclose(result2.params, result3.params) def test_lasso_poisson():", "approx_fprime(np.r_[x, ], model.loglike) score = model.score(np.r_[x, ]) assert_allclose(grad, score, rtol=1e-4)", "# From Stata assert_allclose(result.params, np.r_[0.9272407], rtol=1e-5) assert_allclose(result.bse, 
np.r_[1.295155], rtol=1e-5) def", "ConditionalPoisson(y, x, groups=g) # Check the gradient for the loglikelihood", "score, rtol=1e-4) result = model.fit() # From Stata assert_allclose(result.params, np.r_[1.011074,", "0, 1, 1, 0, 0, 1, 0] x2 = np.r_[0,", "x: model._denom(0, x)) assert_allclose(grad, ngrad, rtol=1e-5) # Check the gradient", "assert_allclose(result.params, np.r_[1.011074, 1.236758], rtol=1e-3) assert_allclose(result.bse, np.r_[1.420784, 1.361738], rtol=1e-5) result.summary() def", "partial likelihood for x in -1, 0, 1, 2: params", "# Check the gradient for the loglikelihood for x in", "Stata assert_allclose(result.params, np.r_[-.9478957, -.0134279], rtol=1e-3) assert_allclose(result.bse, np.r_[.3874942, .1686712], rtol=1e-5) result.summary()", "1, 0, 1, 1, 1] g = np.r_[0, 0, 0,", "2: params = np.r_[x, ] _, grad = model._denom_grad(0, params)", "Regression test assert_allclose(result2.params, np.r_[0, 0, 0.91697508, 0], rtol=1e-4) # Test", "x[:, None] model = ConditionalPoisson(y, x, groups=g) # Check the", "model = ConditionalPoisson(y, x, groups=g) # Check the gradient for", "2, 2] x = np.r_[0, 1, 0, 0, 1, 1,", "def test_lasso_poisson(): np.random.seed(342394) n = 200 groups = np.arange(10) groups", "0, 0, 1, 1, 1, 2, 2, 2, 2] x1", "\"g\": g}) if j == 0: model2 = ConditionalLogit.from_formula( \"y", "1] x = np.r_[0, 1, 0, 0, 1, 1, 0,", "in -1, 0, 1, 2: params = np.r_[-0.5*x, 0.5*x] grad", "model.loglike) score = model.score(params) assert_allclose(grad, score, rtol=1e-4) result = model.fit()", "2, 2, 2] x = np.r_[0, 1, 0, 0, 1,", "1, 1, 1] x = np.empty((10, 2)) x[:, 0] =", "model3 = ConditionalPoisson.from_formula(fml, groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1, alpha=0.2) assert_allclose(result2.params,", "1] = x2 model = ConditionalLogit(y, x, groups=g) # Check", "= model._denom_grad(0, params) ngrad = approx_fprime(params, lambda x: model._denom(0, x))", "0, 1, 1, 1] x = np.empty((10, 2)) x[:, 0]", "0, 1, 0] x = x[:, 
None] model = ConditionalPoisson(y,", "import approx_fprime from numpy.testing import assert_allclose import pandas as pd", "10)) x = np.random.normal(size=(n, 4)) params = np.r_[0, 0, 1,", "result2 = model2.fit() assert_allclose(result1.params, result2.params, rtol=1e-5) assert_allclose(result1.bse, result2.bse, rtol=1e-5) assert_allclose(result1.cov_params(),", "2, 2, 2] x1 = np.r_[0, 1, 0, 0, 1,", "assert_allclose(result2.params, np.r_[0, 0, 0.55235152, 0], rtol=1e-4) # Test with formula", "np.random.seed(3423948) n = 200 groups = np.arange(10) groups = np.kron(groups,", "result = model.fit() # From Stata assert_allclose(result.params, np.r_[-.9478957, -.0134279], rtol=1e-3)", "= np.kron(groups, np.ones(n // 10)) group_effects = np.random.normal(size=10) group_effects =", "x = np.r_[0, 1, 0, 0, 1, 1, 0, 0,", "2)) x[:, 0] = x1 x[:, 1] = x2 model", "params) + group_effects mean = np.exp(lin_pred) y = np.random.poisson(mean) model0", "\"x3\": x[:, 2], \"x4\": x[:, 3], \"groups\": groups}) fml =", "1, 2, 2, 2, 2] x = np.r_[0, 1, 0,", "0, 1, 0, 1, 0, 1, 1, 1] g =", "0.91697508, 0], rtol=1e-4) # Test with formula df = pd.DataFrame({\"y\":", "-1, 0, 1, 2: params = np.r_[x, -1.5*x] _, grad", "pandas as pd def test_logit_1d(): y = np.r_[0, 1, 0,", "else: model2 = ConditionalPoisson.from_formula( \"y ~ 0 + x1 +", "(np.random.uniform(size=n) < mean).astype(np.int) model0 = ConditionalLogit(y, x, groups=groups) result0 =", "test_logit_1d(): y = np.r_[0, 1, 0, 1, 0, 1, 0,", "fml = \"y ~ 0 + x1 + x2 +", "= model2.fit() assert_allclose(result1.params, result2.params, rtol=1e-5) assert_allclose(result1.bse, result2.bse, rtol=1e-5) assert_allclose(result1.cov_params(), result2.cov_params(),", "assert_allclose(result.bse, np.r_[0.4170918], rtol=1e-5) def test_poisson_2d(): y = np.r_[3, 1, 4,", "\"groups\": groups}) fml = \"y ~ 0 + x1 +", "np.random.seed(342394) n = 200 groups = np.arange(10) groups = np.kron(groups,", "x2\", groups=\"g\", data=df) result2 = model2.fit() 
assert_allclose(result1.params, result2.params, rtol=1e-5) assert_allclose(result1.bse,", "x = np.random.normal(size=(n, 4)) params = np.r_[0, 0, 1, 0]", "1, 1, 0, 0, 1, 0] x2 = np.r_[0, 0,", "likelihood for x in -1, 0, 1, 2: params =", "0] = x1 x[:, 1] = x2 model = ConditionalPoisson(y,", "1, 2: params = np.r_[-0.5*x, 0.5*x] grad = approx_fprime(params, model.loglike)", "+ x3 + x4\" model3 = ConditionalLogit.from_formula(fml, groups=\"groups\", data=df) result3", "for x in -1, 0, 1, 2: grad = approx_fprime(np.r_[x,", "+ x4\" model3 = ConditionalLogit.from_formula(fml, groups=\"groups\", data=df) result3 = model3.fit_regularized(L1_wt=1,", "y = np.r_[3, 1, 4, 8, 2, 5, 4, 7,", "model.score(params) assert_allclose(grad, score, rtol=1e-4) result = model.fit() # From Stata", "model0 = ConditionalPoisson(y, x, groups=groups) result0 = model0.fit() # Should", "in 0, 1: np.random.seed(34234) n = 200 y = np.random.randint(0,", "x3 + x4\" model3 = ConditionalLogit.from_formula(fml, groups=\"groups\", data=df) result3 =", "x2 model = ConditionalLogit(y, x, groups=g) # Check the gradient" ]
[ "(job.settings.bypass_build_status or job.author_bypass.get('bypass_build_status', False)) def bypass_jira_check(job): return (job.settings.bypass_jira_check or job.author_bypass.get('bypass_jira_check',", "job.author_bypass.get('bypass_leader_approval', False)) def bypass_author_approval(job): return (job.settings.bypass_author_approval or job.author_bypass.get('bypass_author_approval', False)) def", "or job.author_bypass.get('bypass_leader_approval', False)) def bypass_author_approval(job): return (job.settings.bypass_author_approval or job.author_bypass.get('bypass_author_approval', False))", "return (job.settings.bypass_author_approval or job.author_bypass.get('bypass_author_approval', False)) def bypass_build_status(job): return (job.settings.bypass_build_status or", "False)) def bypass_build_status(job): return (job.settings.bypass_build_status or job.author_bypass.get('bypass_build_status', False)) def bypass_jira_check(job):", "return (job.settings.bypass_peer_approval or job.author_bypass.get('bypass_peer_approval', False)) def bypass_leader_approval(job): return (job.settings.bypass_leader_approval or", "def bypass_author_approval(job): return (job.settings.bypass_author_approval or job.author_bypass.get('bypass_author_approval', False)) def bypass_build_status(job): return", "bypass_incompatible_branch(job): return (job.settings.bypass_incompatible_branch or job.author_bypass.get('bypass_incompatible_branch', False)) def bypass_peer_approval(job): return (job.settings.bypass_peer_approval", "bypass_peer_approval(job): return (job.settings.bypass_peer_approval or job.author_bypass.get('bypass_peer_approval', False)) def bypass_leader_approval(job): return (job.settings.bypass_leader_approval", "<reponame>tcarmet/bert-e def bypass_incompatible_branch(job): return (job.settings.bypass_incompatible_branch or job.author_bypass.get('bypass_incompatible_branch', False)) def bypass_peer_approval(job):", "(job.settings.bypass_leader_approval or 
job.author_bypass.get('bypass_leader_approval', False)) def bypass_author_approval(job): return (job.settings.bypass_author_approval or job.author_bypass.get('bypass_author_approval',", "or job.author_bypass.get('bypass_peer_approval', False)) def bypass_leader_approval(job): return (job.settings.bypass_leader_approval or job.author_bypass.get('bypass_leader_approval', False))", "or job.author_bypass.get('bypass_build_status', False)) def bypass_jira_check(job): return (job.settings.bypass_jira_check or job.author_bypass.get('bypass_jira_check', False))", "bypass_author_approval(job): return (job.settings.bypass_author_approval or job.author_bypass.get('bypass_author_approval', False)) def bypass_build_status(job): return (job.settings.bypass_build_status", "(job.settings.bypass_peer_approval or job.author_bypass.get('bypass_peer_approval', False)) def bypass_leader_approval(job): return (job.settings.bypass_leader_approval or job.author_bypass.get('bypass_leader_approval',", "False)) def bypass_leader_approval(job): return (job.settings.bypass_leader_approval or job.author_bypass.get('bypass_leader_approval', False)) def bypass_author_approval(job):", "job.author_bypass.get('bypass_author_approval', False)) def bypass_build_status(job): return (job.settings.bypass_build_status or job.author_bypass.get('bypass_build_status', False)) def", "(job.settings.bypass_author_approval or job.author_bypass.get('bypass_author_approval', False)) def bypass_build_status(job): return (job.settings.bypass_build_status or job.author_bypass.get('bypass_build_status',", "job.author_bypass.get('bypass_peer_approval', False)) def bypass_leader_approval(job): return (job.settings.bypass_leader_approval or job.author_bypass.get('bypass_leader_approval', False)) def", "return (job.settings.bypass_leader_approval or job.author_bypass.get('bypass_leader_approval', False)) def bypass_author_approval(job): return (job.settings.bypass_author_approval or", "or 
job.author_bypass.get('bypass_author_approval', False)) def bypass_build_status(job): return (job.settings.bypass_build_status or job.author_bypass.get('bypass_build_status', False))", "bypass_leader_approval(job): return (job.settings.bypass_leader_approval or job.author_bypass.get('bypass_leader_approval', False)) def bypass_author_approval(job): return (job.settings.bypass_author_approval", "def bypass_incompatible_branch(job): return (job.settings.bypass_incompatible_branch or job.author_bypass.get('bypass_incompatible_branch', False)) def bypass_peer_approval(job): return", "job.author_bypass.get('bypass_incompatible_branch', False)) def bypass_peer_approval(job): return (job.settings.bypass_peer_approval or job.author_bypass.get('bypass_peer_approval', False)) def", "def bypass_peer_approval(job): return (job.settings.bypass_peer_approval or job.author_bypass.get('bypass_peer_approval', False)) def bypass_leader_approval(job): return", "(job.settings.bypass_incompatible_branch or job.author_bypass.get('bypass_incompatible_branch', False)) def bypass_peer_approval(job): return (job.settings.bypass_peer_approval or job.author_bypass.get('bypass_peer_approval',", "False)) def bypass_author_approval(job): return (job.settings.bypass_author_approval or job.author_bypass.get('bypass_author_approval', False)) def bypass_build_status(job):", "def bypass_build_status(job): return (job.settings.bypass_build_status or job.author_bypass.get('bypass_build_status', False)) def bypass_jira_check(job): return", "bypass_build_status(job): return (job.settings.bypass_build_status or job.author_bypass.get('bypass_build_status', False)) def bypass_jira_check(job): return (job.settings.bypass_jira_check", "return (job.settings.bypass_build_status or job.author_bypass.get('bypass_build_status', False)) def bypass_jira_check(job): return (job.settings.bypass_jira_check or", "return (job.settings.bypass_incompatible_branch or job.author_bypass.get('bypass_incompatible_branch', 
False)) def bypass_peer_approval(job): return (job.settings.bypass_peer_approval or", "or job.author_bypass.get('bypass_incompatible_branch', False)) def bypass_peer_approval(job): return (job.settings.bypass_peer_approval or job.author_bypass.get('bypass_peer_approval', False))", "def bypass_leader_approval(job): return (job.settings.bypass_leader_approval or job.author_bypass.get('bypass_leader_approval', False)) def bypass_author_approval(job): return", "False)) def bypass_peer_approval(job): return (job.settings.bypass_peer_approval or job.author_bypass.get('bypass_peer_approval', False)) def bypass_leader_approval(job):" ]
[ "bkt.Callback(self.locpin_get_image, context=True), get_item_screentip = bkt.Callback(lambda index: self.items[index][1]), get_item_supertip = bkt.Callback(lambda", "Top shape.select() os.remove(tmpfile) class PPTSymbolsGalleryRecent(PPTSymbolsGallery): @property def symbols(self): return PPTSymbolsSettings.recent_symbols", "-*- ''' Created on 02.11.2017 @author: fstallmann ''' from __future__", "ref_frame=[0,0,640,480]): left = self.length_from_definition(pos_definition[0], ref_frame[2]) + ref_frame[0] top = self.length_from_definition(pos_definition[1],", "0.25 my_kwargs[\"small_step\"] = 0.125 my_kwargs[\"rounding_factor\"] = 0.125 my_kwargs.update(kwargs) super(ParagraphFormatSpinnerBox, self).__init__(**my_kwargs)", "self.rect_from_definition(position, ref_frame=[0,0,ref_width, ref_height]) left = left /ref_width * width fill_width", "if int(value) == -2147483648: #different values for each paragraph, so", "area according to settings in the specified item ''' #", "recent_symbols = deque(bkt.settings.get(\"bkt.symbols.recent_symbols\", []), maxlen=3) convert_into_shape = bkt.settings.get(\"bkt.symbols.convert_into_shape\", True) #always", "item) else: #convert into shape or bitmap if PPTSymbolsSettings.get_convert_into_bitmap(): self.create_symbol_bitmap(selection.SlideRange(1),", "paragraph value = self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat) except: pass return value return None", "font (e.g. 
Arial Unicode) @classmethod def add_to_recent(cls, item): try: #try", "self.small_step = 0.1 self.round_at = 1 return getattr(par_format, self.attr) ###", "top /ref_height * height fill_height = fill_height/ref_height * height color", "InsertAfter placeholder_char = textrange.InsertAfter(\"X\") #append placeholder symbol so that InsertSymbol", "= cls.convert_into_bitmap @classmethod def get_convert_into_bitmap(cls): return (cls.convert_into_bitmap or bkt.get_key_state(bkt.KeyCodes.CTRL)) and", "self.items[index] return 'Positionierung: ' + item[0] def get_item_supertip(self, index): return", "paragraph textrange.ParagraphFormat.LineRuleAfter = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter if self.attr == \"SpaceWithin\" and textrange.ParagraphFormat.LineRuleWithin", "fallback_font(self): return PPTSymbolsSettings.unicode_font or bkt.ribbon.SymbolsGallery.fallback_font def on_action_indexed(self, selected_item, index, context,", "placeholder_char = textrange.InsertAfter(\"X\") #append placeholder symbol so that InsertSymbol behaves", "font = item[0] or self.fallback_font img = bkt.ribbon.SymbolsGallery.create_symbol_image(font, item[1], 400,", "height/5-1)) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height-1)) return img def rect_from_definition(self, pos_definition,", "def get_attr_from_textrange(self, textrange): return self._get_attr(textrange.ParagraphFormat) def _get_attr(self, par_format): if self.attr", "label, columns = columns, image_mso='PositionAnchoringGallery', supertip=u\"Positioniere die ausgewählten Shapes auf", "if PPTSymbolsSettings.get_convert_into_bitmap(): self.create_symbol_bitmap(selection.SlideRange(1), item) else: self.create_symbol_shape(selection.SlideRange(1), item) def _add_to_recent(self, item):", "the first paragraph textrange.ParagraphFormat.LineRuleWithin = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin setattr(textrange.ParagraphFormat, self.attr, value) class", "def locpin_on_action_indexed(self, 
selected_item, index): self.locpin.index = index def locpin_get_image(self, context,", "children=[ bkt.ribbon.Button( label=\"Benutzerdef. Bereich festlegen\", supertip=\"Der benutzerdefinierte Bereich wird anhand", "position, reference, presentation): # create bitmap, define pen/brush height =", "round_cm = True, convert = 'pt_to_cm', get_enabled = bkt.apps.ppt_selection_contains_textframe, )", "initialization attr = 'SpaceBefore' def __init__(self, **kwargs): ''' attr examples:", "if InsertSymbol is used before (it remains the font of", "#append placeholder symbol so that InsertSymbol behaves the same as", "index is None: return context.python_addin.load_image(self.items[self.locpin.index][0]) else: return context.python_addin.load_image(self.items[index][0]) class PositionGallery(bkt.ribbon.Gallery):", "= [ [u\"<NAME>\", [ 0, 0, 1, 1], 'CONTENT'], [u\"2/3", "''' create numberd shape according of settings in clicked element", "length_definition else: return 10 ## userdefined area def set_userdefined_area(self, presentation,", "__init__(self, locpin=None, item_supertip=\"Shape-Fixpunkt bzw. 
Fixierung bei Änderung {}\", **kwargs): self.locpin", "self).__init__(**my_kwargs) def locpin_on_action_indexed(self, selected_item, index): self.locpin.index = index def locpin_get_image(self,", "super(PositionGallery, self).__init__( label = label, columns = columns, image_mso='PositionAnchoringGallery', supertip=u\"Positioniere", "to reference items = [ [u\"<NAME>\", [ 0, 0, 1,", "# get_enabled=bkt.apps.ppt_shapes_or_text_selected, columns=\"3\", item_height=\"24\", item_width=\"24\", show_item_label=False, on_action_indexed = bkt.Callback(self.locpin_on_action_indexed), get_selected_item_index", "@classmethod def switch_convert_into_shape(cls, pressed): cls.convert_into_shape = pressed cls.convert_into_bitmap = False", "None: return context.python_addin.load_image(self.items[self.locpin.index][0]) else: return context.python_addin.load_image(self.items[index][0]) class PositionGallery(bkt.ribbon.Gallery): # items:", "as TextRange does not contain LeftIndent, etc. else: for textframe", "= Drawing.SolidBrush(color) g.FillRectangle(brush, Drawing.Rectangle(round(left),round(top), round(fill_width), round(fill_height))) color = Drawing.ColorTranslator.FromHtml('#ff999999') pen", "if value is None: #e.g. no textframe detected return None", "= self.length_from_definition(pos_definition[0], ref_frame[2]) + ref_frame[0] top = self.length_from_definition(pos_definition[1], ref_frame[3]) +", "\"SpaceBefore\" and par_format.LineRuleBefore == 0) or (self.attr == \"SpaceAfter\" and", "return 'Verwende angezeigten Position/Größe.' 
def create_image(self, position, reference, presentation): #", "os.path.join(tempfile.gettempdir(), \"bkt-symbol.png\") img.Save(tmpfile, Drawing.Imaging.ImageFormat.Png) shape = slide.shapes.AddPicture(tmpfile, 0, -1, 200,", "# percentage values return reference * length_definition else: # absolute", "textrange.ParagraphFormat.LineRuleBefore == -2: #if values differ, set the same value", "and then insert symbol self.insert_symbol_into_text(selection.TextRange2, item) elif PPTSymbolsSettings.convert_into_text() and selection.Type", "item_supertip.format(\"unten-mitte\")), (\"fix_locpin_br\", \"Unten-rechts\", item_supertip.format(\"unten-rechts\")), ] my_kwargs = dict( # get_enabled=bkt.apps.ppt_shapes_or_text_selected,", "= slide.shapes.addTextbox( #office.MsoAutoShapeType.msoShapeRectangle.value__, 1, 100,100,200,200) shape.TextFrame2.WordWrap = 0 shape.TextFrame2.AutoSize =", "= textrange.InsertAfter(item[1]) #append symbol text #so, NameFarEast and NameComplexScript should", "cls.recent_symbols @classmethod def switch_unicode_font(cls, font=None): cls.unicode_font = font #if font", "pen = Drawing.Pen(color,1) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height/5-1)) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1,", "#font name return char_inserted else: return textrange.InsertAfter(item[1]) #append symbol text", "img.Save(tmpfile, Drawing.Imaging.ImageFormat.Png) shape = slide.shapes.AddPicture(tmpfile, 0, -1, 200, 200) #FileName,", "item image with target area according to settings in the", "def add_to_recent(cls, item): try: #try to remove if already exists", "value) ### Getter Methods ### def get_attr_from_shapes(self, shapes, selection): if", "in clicked element ''' item = self.items[index] position = item[1]", "1./3, 1], 'CONTENT'], [u\"1/3 Rechts\", [2./3, 0, 1./3, 1], 'CONTENT'],", "[ (\"fix_locpin_tl\", \"Oben-links\", item_supertip.format(\"oben-links\")), (\"fix_locpin_tm\", \"Oben-mitte\", item_supertip.format(\"oben-mitte\")), 
(\"fix_locpin_tr\", \"Oben-rechts\", item_supertip.format(\"oben-rechts\")),", "0 new_shape = pplib.convert_text_into_shape(shape) new_shape.TextFrame2.TextRange.Font.Size = orig_fontsize except: shape.select() else:", "utf-8 -*- ''' Created on 02.11.2017 @author: fstallmann ''' from", "selection)), get_image=bkt.Callback(lambda: self.get_item_image(index)), get_visible=bkt.Callback(lambda: self.button_get_visible(index)), ) class LocpinGallery(bkt.ribbon.Gallery): def __init__(self,", "text selected self.set_attr_for_textrange(selection.TextRange2, value) #need to use TextRange2 as TextRange", "# items: [label, position, reference] # position: [left, top, width,", "selection: self.on_action_indexed(None, index, context, selection)), get_image=bkt.Callback(lambda: self.get_item_image(index)), get_visible=bkt.Callback(lambda: self.button_get_visible(index)), )", "des gewählten Shapes festgelegt. Dieser Bereich ist anschließend über die", "(self.attr == \"SpaceAfter\" and par_format.LineRuleAfter == 0) or (self.attr ==", "height color = Drawing.ColorTranslator.FromHtml('#ffdd0000') brush = Drawing.SolidBrush(color) g.FillRectangle(brush, Drawing.Rectangle(round(left),round(top), round(fill_width),", "on_action=bkt.Callback(lambda context, selection: self.on_action_indexed(None, index, context, selection)), get_image=bkt.Callback(lambda: self.get_item_image(index)), get_visible=bkt.Callback(lambda:", "if there is no textrange, e.g. selection within a chart", "and throws TypeError if char_number > 61695: #for higher numbers", "= os.path.join(tempfile.gettempdir(), \"bkt-symbol.png\") img.Save(tmpfile, Drawing.Imaging.ImageFormat.Png) shape = slide.shapes.AddPicture(tmpfile, 0, -1,", "shapes ''' value = max(0,value) for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textframe(textframe,", "shows unicode chars til f0ff. 
raise TypeError(\"character number to large", "self.big_step = 0.2 self.small_step = 0.1 self.round_at = 1 return", "index): # item = self.items[index] # return \"%s\" % getattr(NumberedShapes,", "getattr(NumberedShapes, 'label_' + item['label'])[index%self.columns] def get_item_image(self, index, presentation): ''' creates", "0: # negative values specify distance 'from right' return reference", "the same value as in the first paragraph textrange.ParagraphFormat.LineRuleAfter =", "is not None except: return False def get_index_as_button(self, index): return", "to InsertAfter placeholder_char = textrange.InsertAfter(\"X\") #append placeholder symbol so that", "MarginBottom, MarginLeft, MarginRight ''' #self.attr is automatically set through RibbonControl", "maxlen=3) convert_into_shape = bkt.settings.get(\"bkt.symbols.convert_into_shape\", True) #always convert newly inserted symbols", "# absolute values return length_definition else: return 10 ## userdefined", "get_attr_from_textframe(self, textframe): return getattr(textframe, self.attr) ### Setter methods ### def", "Drawing.Pen(color,1) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height/5-1)) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height-1)) return", "#font name def insert_symbol_into_shapes(self, shapes, item): #pplib.iterate_shape_textframes(shapes, lambda textframe: self.insert_symbol_into_text(textframe.TextRange,", "switch_unicode_font(cls, font=None): cls.unicode_font = font #if font else SymbolsGallery.fallback_font bkt.settings[\"bkt.symbols.unicode_font\"]", "large negative number (values differ between selected items) with fallback", "bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def switch_convert_into_shape(cls, pressed):", "self.fallback_value else: return value def on_change(self, shapes, selection, value): self.set_attr_for_shapes(shapes,", "return 
self.create_image(item[1], item[2], presentation) def get_item_screentip(self, index): # retrieve item-settings", "self.get_attr_from_shapes(shapes, selection) if value is None: #e.g. no textframe detected", "get_attr_from_textrange(self, textrange): return self._get_attr(textrange.ParagraphFormat) def _get_attr(self, par_format): if self.attr in", "und wird dauerhaft in der aktuellen Prästentation vorgehalten.\", on_action=bkt.Callback(self.set_userdefined_area), get_enabled", "= 0.25 my_kwargs[\"small_step\"] = 0.125 my_kwargs[\"rounding_factor\"] = 0.125 my_kwargs.update(kwargs) super(ParagraphFormatSpinnerBox,", "get_attr_from_shapes(self, shapes, selection): ''' Get attr for shapes ''' for", "columns=\"3\", item_height=\"24\", item_width=\"24\", show_item_label=False, on_action_indexed = bkt.Callback(self.locpin_on_action_indexed), get_selected_item_index = bkt.Callback(lambda:", "= top /ref_height * height fill_height = fill_height/ref_height * height", "img = bkt.ribbon.SymbolsGallery.create_symbol_image(font, item[1], 400, None) tmpfile = os.path.join(tempfile.gettempdir(), \"bkt-symbol.png\")", "Mitte\", [1./3, 0, 1./3, 1], 'CONTENT'], [u\"1/3 Rechts\", [2./3, 0,", "item) elif PPTSymbolsSettings.convert_into_text() and selection.Type == 2 and not shift_or_ctrl:", "symbol so that InsertSymbol behaves the same as InsertAfter return", "from . 
import helpers as pplib class TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance", "set_attr_for_shapes(self, shapes, selection, value): ''' Set attr for shapes '''", "__init__(self, positions=None, label=\"Standardpositionen\", columns=3, **kwargs): self.items = positions or PositionGallery.items", "pplib.iterate_shape_textframes(shapes): self.set_attr_for_textrange(textframe.TextRange, value) def set_attr_for_textrange(self, textrange, value): #using textrange instead", "None else: # shapes selected for textframe in pplib.iterate_shape_textframes(shapes): try:", "tmpfile = os.path.join(tempfile.gettempdir(), \"bkt-symbol.png\") img.Save(tmpfile, Drawing.Imaging.ImageFormat.Png) shape = slide.shapes.AddPicture(tmpfile, 0,", "[u\"1/3 Mitte\", [1./3, 0, 1./3, 1], 'CONTENT'], [u\"1/3 Rechts\", [2./3,", "value as in the first paragraph textrange.ParagraphFormat.LineRuleWithin = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin setattr(textrange.ParagraphFormat,", "char_inserted else: return textrange.InsertAfter(item[1]) #append symbol text # if item[0]:", "doesnt work) InsertSymbol does not work anymore. 
Also the default", "value as in the first paragraph textrange.ParagraphFormat.LineRuleBefore = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore if", "= page_setup.SlideWidth, page_setup.SlideHeight # target size left,top,width,height = self.rect_from_definition(position, ref_frame=[ref_left,ref_top,ref_width,", "else: # SLIDE / ABS ref_width,ref_height = presentation.PageSetup.SlideWidth, presentation.PageSetup.SlideHeight left,top,fill_width,fill_height", "== 3: # text selected try: # produces error if", "item[0] or self.fallback_font img = bkt.ribbon.SymbolsGallery.create_symbol_image(font, item[1], 400, None) tmpfile", "def __init__(self, positions=None, label=\"Standardpositionen\", columns=3, **kwargs): self.items = positions or", "for shape in shapes: # if shape.HasTextFrame == -1: #", "benutzerdefinierte Bereich wird anhand des gewählten Shapes festgelegt. Dieser Bereich", "and not bkt.get_key_state(bkt.KeyCodes.SHIFT) class PPTSymbolsGallery(bkt.ribbon.SymbolsGallery): @property def fallback_font(self): return PPTSymbolsSettings.unicode_font", "is no textrange, e.g. selection within a chart return self._get_attr(selection.TextRange2.ParagraphFormat)", "so get value from first paragraph value = self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat) except:", "[u\"1/2 Links\", [ 0, 0, .5, 1], 'CONTENT'], [u\"1/2 Mitte\",", "instead of textframe! if self.attr == \"SpaceBefore\" and textrange.ParagraphFormat.LineRuleBefore ==", "self.big_step = 3 self.small_step = 1 self.round_at = 0 else:", "e.g. 
emojis, and throws TypeError if char_number > 61695: #for", "[u\"<NAME>\", [ 0, 0, 1, 1], 'CONTENT'], [u\"2/3 Links\", [", "### def get_text(self, shapes, selection): value = self.get_attr_from_shapes(shapes, selection) if", "@classmethod def get_convert_into_shape(cls): return (cls.convert_into_shape or bkt.get_key_state(bkt.KeyCodes.SHIFT)) and not bkt.get_key_state(bkt.KeyCodes.CTRL)", "NameFarEast and NameComplexScript should be writable, but they are not", "/ ABS page_setup = context.presentation.PageSetup ref_left,ref_top = 0, 0 ref_width,ref_height", "''' item = self.symbols[index] self._add_to_recent(item) shift_or_ctrl = bkt.get_key_state(bkt.KeyCodes.CTRL) or bkt.get_key_state(bkt.KeyCodes.SHIFT)", "font of the symbol). only way to replace these values", "Unicode=True except TypeError: char_inserted = textrange.InsertAfter(item[1]) #append symbol text #so,", "pplib.ContentArea.read_contentarea(presentation) if len(self.items) == 12: self.items.pop() self.items.append([u\"Benutzerdef. Bereich\", [left, top,", "get_label=bkt.Callback(lambda: self.button_get_label(index)), on_action=bkt.Callback(lambda context, selection: self.on_action_indexed(None, index, context, selection)), get_image=bkt.Callback(lambda:", "error if there is no textrange, e.g. 
selection within a", "presentation.PageSetup.SlideHeight left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,0,ref_width, ref_height]) left = left /ref_width", "elif type(length_definition) in [int, float, long]: if length_definition < 0:", "values are converted according to reference items = [ [u\"<NAME>\",", "< 0: # negative values specify distance 'from right' return", "**kwargs) def get_item_count(self, presentation): self.init_userdefined_area_item(presentation) return len(self.items) # def get_enabled(self,", "selected_item, index, context, **kwargs): ''' reposition shapes according of settings", "= bkt.apps.ppt_selection_contains_textframe, ) my_kwargs.update(kwargs) super(TextframeSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks", "insert as symbol font = item[0] or self.fallback_font try: char_number", "bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_shape(cls): return", "] def __init__(self, positions=None, label=\"Standardpositionen\", columns=3, **kwargs): self.items = positions", "[ # Item(image=gal_item[0], screentip=gal_item[1], supertip=gal_item[2]) # for gal_item in self.items", "def create_symbol_shape(self, slide, item): shape = slide.shapes.addTextbox( #office.MsoAutoShapeType.msoShapeRectangle.value__, 1, 100,100,200,200)", "no textrange, e.g. selection within a chart return self._get_attr(selection.TextRange2.ParagraphFormat) except:", "error for certain chart types, e.g. 
Treemap continue return None", "= fill_width /ref_width * width top = top /ref_height *", "== 3 and not shift_or_ctrl: #text selected selection.TextRange2.Text = \"\"", "0, 0, 1, 1./6], 'CONTENT'], [u\"1/6 Unten\", [ 0, 5./6,", "(\"fix_locpin_ml\", \"Mitte-links\", item_supertip.format(\"mitte-links\")), (\"fix_locpin_mm\", \"Mitte-mitte\", item_supertip.format(\"mitte-mitte\")), (\"fix_locpin_mr\", \"Mitte-rechts\", item_supertip.format(\"mitte-rechts\")), (\"fix_locpin_bl\",", "size_string = '###', round_cm = True, convert = 'pt_to_cm', get_enabled", "item): shape = slide.shapes.addTextbox( #office.MsoAutoShapeType.msoShapeRectangle.value__, 1, 100,100,200,200) shape.TextFrame2.WordWrap = 0", "\"SpaceAfter\", \"SpaceWithin\"]: if (self.attr == \"SpaceBefore\" and par_format.LineRuleBefore == 0)", "### Instance initialization attr = 'SpaceBefore' def __init__(self, **kwargs): '''", "self.attr) ### Setter methods ### def set_attr_for_shapes(self, shapes, selection, value):", "0, 0, .5, 1], 'CONTENT'], [u\"1/2 Mitte\", [.25, 0, .5,", "2 and not shift_or_ctrl: #shapes selected self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection), item) else: #convert", "except: shape.select() else: new_shape.select() else: shape.select() def create_symbol_bitmap(self, slide, item):", "not shift_or_ctrl: #text selected selection.TextRange2.Text = \"\" #remove selected text", "'Verwende angezeigten Position/Größe.' 
def create_image(self, position, reference, presentation): # create", "get_item_count = bkt.Callback(lambda: len(self.items)), get_item_label = bkt.Callback(lambda index: self.items[index][1]), get_item_image", "= \"+mn-cs\" char_inserted.Font.Name = font #font name return char_inserted else:", "width fill_width = fill_width /ref_width * width top = top", "= fill_height/ref_height * height color = Drawing.ColorTranslator.FromHtml('#ffdd0000') brush = Drawing.SolidBrush(color)", "#ppAutoSizeShapeToFitText shape.TextFrame2.MarginBottom = 0 shape.TextFrame2.MarginTop = 0 shape.TextFrame2.MarginLeft = 0", "-2147483648: #different values for each paragraph, so get value from", "placeholder_char.InsertSymbol(font, char_number, -1) #symbol: FontName, CharNumber (decimal), Unicode=True except TypeError:", "True # def get_item_label(self, index): # item = self.items[index] #", "as symbol font = item[0] or self.fallback_font try: char_number =", "#replace large negative number (values differ between selected items) with", "selection, value): self.set_attr_for_shapes(shapes, selection, value) ### Getter Methods ### def", "'CONTENT'], [u\"1/3 Rechts\", [2./3, 0, 1./3, 1], 'CONTENT'], [u\"1/6 Oben\",", "in kwargs: my_kwargs[\"get_image\"] = bkt.Callback(self.locpin_get_image, context=True) my_kwargs.update(kwargs) super(LocpinGallery, self).__init__(**my_kwargs) def", "0, 0, 2./3, 1], 'CONTENT'], [u\"2/3 Rechts\", [1./3, 0, 2./3,", "\"Mitte-mitte\", item_supertip.format(\"mitte-mitte\")), (\"fix_locpin_mr\", \"Mitte-rechts\", item_supertip.format(\"mitte-rechts\")), (\"fix_locpin_bl\", \"Unten-links\", item_supertip.format(\"unten-links\")), (\"fix_locpin_bm\", \"Unten-mitte\",", "+= self.length_from_definition(ldef, reference) return l elif type(length_definition) in [int, float,", "except: return super(PPTSymbolsGalleryRecent, self).create_symbol_image(\"Arial\", \"?\") def button_get_label(self, index): try: return", "handling self.fallback_value = 0 my_kwargs = dict( 
size_string = '###',", "the first paragraph textrange.ParagraphFormat.LineRuleBefore = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore if self.attr == \"SpaceAfter\"", "is not None: #font name is given, then insert as", "def get_index_as_button(self, index): return bkt.ribbon.Button( id=\"{}_button_{}\".format(self.id, index), get_label=bkt.Callback(lambda: self.button_get_label(index)), on_action=bkt.Callback(lambda", "-2147483648: #replace large negative number (values differ between selected items)", "{}\", **kwargs): self.locpin = locpin or pplib.GlobalLocPin self.items = [", "return None elif int(value) == -2147483648: #replace large negative number", "text is selected return self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat) except: try: # produces error", "switch_convert_into_shape(cls, pressed): cls.convert_into_shape = pressed cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] =", "context.python_addin.load_image(self.items[self.locpin.index][0]) else: return context.python_addin.load_image(self.items[index][0]) class PositionGallery(bkt.ribbon.Gallery): # items: [label, position,", "= bkt.settings.get(\"bkt.symbols.convert_into_shape\", True) #always convert newly inserted symbols into shapes", "None: #font name is given, then insert as symbol font", "presentation, shapes): if len(shapes) == 1: pplib.ContentArea.define_contentarea(presentation, shapes[0]) else: frame", "int(value) == -2147483648: #replace large negative number (values differ between", "else: # absolute values return length_definition else: return 10 ##", "shift_or_ctrl: #text selected selection.TextRange2.Text = \"\" #remove selected text first", "def insert_symbol_into_shapes(self, shapes, item): #pplib.iterate_shape_textframes(shapes, lambda textframe: self.insert_symbol_into_text(textframe.TextRange, item)) for", "font=None): cls.unicode_font = font #if font else SymbolsGallery.fallback_font 
bkt.settings[\"bkt.symbols.unicode_font\"] =", "''' from __future__ import absolute_import from collections import deque import", "= dict( # get_enabled=bkt.apps.ppt_shapes_or_text_selected, columns=\"3\", item_height=\"24\", item_width=\"24\", show_item_label=False, on_action_indexed =", "return \"%s\" % getattr(NumberedShapes, 'label_' + item['label'])[index%self.columns] def get_item_image(self, index,", "to settings in the specified item ''' # retrieve item-settings", "return getattr(par_format, self.attr) ### Setter methods ### def set_attr_for_shapes(self, shapes,", "cls.convert_into_shape = pressed cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"]", "and par_format.LineRuleWithin == 0): self.huge_step = 10 self.big_step = 3", "index), get_label=bkt.Callback(lambda: self.button_get_label(index)), on_action=bkt.Callback(lambda context, selection: self.on_action_indexed(None, index, context, selection)),", "item[0] def get_item_supertip(self, index): return 'Verwende angezeigten Position/Größe.' def create_image(self,", "get_item_label = bkt.Callback(lambda index: self.items[index][1]), get_item_image = bkt.Callback(self.locpin_get_image, context=True), get_item_screentip", "deque import bkt from bkt import dotnet Drawing = dotnet.import_drawing()", "[\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: my_kwargs[\"round_pt\"] = True else: my_kwargs[\"round_cm\"] = True", "try: return self.get_attr_from_textframe(textframe) except: # produces error for certain chart", "setattr(textrange.ParagraphFormat, self.attr, value) class PPTSymbolsSettings(object): recent_symbols = deque(bkt.settings.get(\"bkt.symbols.recent_symbols\", []), maxlen=3)", "= [ # Item(image=gal_item[0], screentip=gal_item[1], supertip=gal_item[2]) # for gal_item in", "icon is setting it to '+mn-..' 
char_inserted.Font.NameFarEast = \"+mn-ea\" char_inserted.Font.NameComplexScript", "pos_definition, ref_frame=[0,0,640,480]): left = self.length_from_definition(pos_definition[0], ref_frame[2]) + ref_frame[0] top =", "== \"SpaceAfter\" and textrange.ParagraphFormat.LineRuleAfter == -2: #if values differ, set", "return not (cls.convert_into_shape or cls.convert_into_bitmap) @classmethod def switch_convert_into_text(cls, pressed): cls.convert_into_shape", "ldef in length_definition: l += self.length_from_definition(ldef, reference) return l elif", "'CONTENT'], [u\"1/2 Rechts\", [ .5, 0, .5, 1], 'CONTENT'], [u\"1/3", "e.g. Treemap continue return None def get_attr_from_textframe(self, textframe): return getattr(textframe,", "setattr(textframe, self.attr, value) class ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr =", "index): try: return self.symbols[index] is not None except: return False", "self.items[index][1]), get_item_supertip = bkt.Callback(lambda index: self.items[index][2]), # children = [", "new_shape = pplib.convert_text_into_shape(shape) new_shape.TextFrame2.TextRange.Font.Size = orig_fontsize except: shape.select() else: new_shape.select()", "False cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap", "Getter Methods ### def get_attr_from_shapes(self, shapes, selection): if selection.Type ==", "settings in the specified item ''' # retrieve item-settings item", "= locpin or pplib.GlobalLocPin self.items = [ (\"fix_locpin_tl\", \"Oben-links\", item_supertip.format(\"oben-links\")),", "ValueError: cls.recent_symbols.append(item) bkt.settings[\"bkt.symbols.recent_symbols\"] = cls.recent_symbols @classmethod def switch_unicode_font(cls, font=None): cls.unicode_font", "import absolute_import from collections import deque import bkt from bkt", "symbols(self, value): pass def 
get_item_image(self, index): try: return super(PPTSymbolsGalleryRecent, self).get_item_image(index)", "Treemap continue try: if int(value) == -2147483648: #different values for", "Drawing.ColorTranslator.FromHtml('#ff999999') pen = Drawing.Pen(color,1) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height/5-1)) g.DrawRectangle(pen, Drawing.Rectangle(0,0,", "examples: SpaceBefore, SpaceAfter, LeftIndent, FirstLineIndent, LineSpacing ''' #self.attr is automatically", "there is no textrange, e.g. selection within a chart return", "or bkt.get_key_state(bkt.KeyCodes.SHIFT) if selection.Type == 3 and not shift_or_ctrl: #text", "LinkToFile, SaveWithDocument, Left, Top shape.select() os.remove(tmpfile) class PPTSymbolsGalleryRecent(PPTSymbolsGallery): @property def", "collections import deque import bkt from bkt import dotnet Drawing", "* width fill_width = fill_width /ref_width * width top =", "= Drawing.Bitmap(width, height) g = Drawing.Graphics.FromImage(img) # reference size if", "= 0 new_shape = pplib.convert_text_into_shape(shape) new_shape.TextFrame2.TextRange.Font.Size = orig_fontsize except: shape.select()", "helpers as pplib class TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr =", "''' attr examples: SpaceBefore, SpaceAfter, LeftIndent, FirstLineIndent, LineSpacing ''' #self.attr", "[1./3, 0, 2./3, 1], 'CONTENT'], [u\"1/2 Links\", [ 0, 0,", "(e.g. 
Arial Unicode) @classmethod def add_to_recent(cls, item): try: #try to", "FontName, CharNumber (decimal), Unicode=True except TypeError: char_inserted = textrange.InsertAfter(item[1]) #append", "= bkt.Callback(self.locpin_get_image, context=True), get_item_screentip = bkt.Callback(lambda index: self.items[index][1]), get_item_supertip =", "Drawing.Bitmap(width, height) g = Drawing.Graphics.FromImage(img) # reference size if reference", "SaveWithDocument, Left, Top shape.select() os.remove(tmpfile) class PPTSymbolsGalleryRecent(PPTSymbolsGallery): @property def symbols(self):", "item[2], presentation) def get_item_screentip(self, index): # retrieve item-settings item =", "fill_width /ref_width * width top = top /ref_height * height", "!= \"FirstLineIndent\": #FirstLineIndent can be negative! value = max(0,value) if", "# item = self.items[index] # return \"%s\" % getattr(NumberedShapes, 'label_'", "#ord does not work for higher level unicode, e.g. emojis,", "l elif type(length_definition) in [int, float, long]: if length_definition <", "# shapes selected for textframe in pplib.iterate_shape_textframes(shapes): try: value =", "left = left /ref_width * width fill_width = fill_width /ref_width", "#due to performance check first if tag exists at all", "= presentation.PageSetup.SlideWidth, presentation.PageSetup.SlideHeight left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,0,ref_width, ref_height]) left =", "ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr = 'SpaceBefore' def __init__(self, **kwargs):", "50%] l = 0 for ldef in length_definition: l +=", "import dotnet Drawing = dotnet.import_drawing() from . 
import helpers as", "0 shape.TextFrame2.MarginRight = 0 self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) # if item[0]: #", "top, width, height = pplib.ContentArea.read_contentarea(presentation) if len(self.items) == 12: self.items.pop()", "item)) for textframe in pplib.iterate_shape_textframes(shapes): self.insert_symbol_into_text(textframe.TextRange, item) # for shape", "my_kwargs[\"get_image\"] = bkt.Callback(self.locpin_get_image, context=True) my_kwargs.update(kwargs) super(LocpinGallery, self).__init__(**my_kwargs) def locpin_on_action_indexed(self, selected_item,", "[label, position, reference] # position: [left, top, width, height] #", "0.1 self.round_at = 1 return getattr(par_format, self.attr) ### Setter methods", "= shape.TextFrame2.TextRange.Font.Size shape.TextFrame2.TextRange.Font.Size = 60 shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible = 0 new_shape =", "(height*4)/5 left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,v_offset,width, v_ref]) else: # SLIDE /", "/ SLIDE / ABS # values are converted according to", "__future__ import absolute_import from collections import deque import bkt from", "pressed): cls.convert_into_shape = False cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape", "def locpin_get_image(self, context, index=None): if index is None: return context.python_addin.load_image(self.items[self.locpin.index][0])", "shape.HasTextFrame == -1: # self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) def create_symbol_shape(self, slide, item):", "frame) self.init_userdefined_area_item(presentation) def init_userdefined_area_item(self, presentation): #due to performance check first", "ref_frame=[ref_left,ref_top,ref_width, ref_height]) frame = pplib.BoundingFrame.from_rect(left, top, width, height) if 'on_position_change'", "width, height = pplib.ContentArea.read_contentarea(presentation) if len(self.items) == 12: 
self.items.pop() self.items.append([u\"Benutzerdef.", "value): pass def get_item_image(self, index): try: return super(PPTSymbolsGalleryRecent, self).get_item_image(index) except:", "def get_attr_from_textframe(self, textframe): return getattr(textframe, self.attr) ### Setter methods ###", "in length_definition: l += self.length_from_definition(ldef, reference) return l elif type(length_definition)", "width, height] # values can be absolute or percentage #", "tag exists at all if pplib.ContentArea.isset_contentarea(presentation): left, top, width, height", "SLIDE / ABS # values are converted according to reference", "# values are converted according to reference items = [", "= dict( size_string = '###', round_cm = True, convert =", "#for higher numbers (f0ff works, f100 doesnt work) InsertSymbol does", "char_inserted = textrange.InsertAfter(item[1]) #append symbol text #so, NameFarEast and NameComplexScript", "dauerhaft in der aktuellen Prästentation vorgehalten.\", on_action=bkt.Callback(self.set_userdefined_area), get_enabled = bkt.get_enabled_auto", "reference * length_definition else: # absolute values return length_definition else:", "\"SpaceWithin\"]: if (self.attr == \"SpaceBefore\" and par_format.LineRuleBefore == 0) or", "context, selection)), get_image=bkt.Callback(lambda: self.get_item_image(index)), get_visible=bkt.Callback(lambda: self.button_get_visible(index)), ) class LocpinGallery(bkt.ribbon.Gallery): def", "1 self.round_at = 0 else: self.huge_step = 0.5 self.big_step =", "PPTSymbolsGallery(bkt.ribbon.SymbolsGallery): @property def fallback_font(self): return PPTSymbolsSettings.unicode_font or bkt.ribbon.SymbolsGallery.fallback_font def on_action_indexed(self,", "return value return None def get_attr_from_textrange(self, textrange): return self._get_attr(textrange.ParagraphFormat) def", "def get_item_count(self, presentation): self.init_userdefined_area_item(presentation) return len(self.items) # def get_enabled(self, shapes):", "FirstLineIndent, 
LineSpacing ''' #self.attr is automatically set through RibbonControl attribute", "item_supertip.format(\"unten-links\")), (\"fix_locpin_bm\", \"Unten-mitte\", item_supertip.format(\"unten-mitte\")), (\"fix_locpin_br\", \"Unten-rechts\", item_supertip.format(\"unten-rechts\")), ] my_kwargs =", "clicked element ''' item = self.symbols[index] self._add_to_recent(item) shift_or_ctrl = bkt.get_key_state(bkt.KeyCodes.CTRL)", "os font = item[0] or self.fallback_font img = bkt.ribbon.SymbolsGallery.create_symbol_image(font, item[1],", "TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr = 'MarginTop' def __init__(self, **kwargs):", "SpaceAfter, LeftIndent, FirstLineIndent, LineSpacing ''' #self.attr is automatically set through", "vorgehalten.\", on_action=bkt.Callback(self.set_userdefined_area), get_enabled = bkt.get_enabled_auto ) ], **kwargs ) def", "percentage # reference: CONTENTE / SLIDE / ABS # values", "10 ## userdefined area def set_userdefined_area(self, presentation, shapes): if len(shapes)", "self.fallback_font try: char_number = ord(item[1]) #ord does not work for", "def symbols(self): return PPTSymbolsSettings.recent_symbols @symbols.setter def symbols(self, value): pass def", "selection.Type == 3: # text selected self.set_attr_for_textrange(selection.TextRange2, value) #need to", "length_definition: l += self.length_from_definition(ldef, reference) return l elif type(length_definition) in", "v_ref]) else: # SLIDE / ABS ref_width,ref_height = presentation.PageSetup.SlideWidth, presentation.PageSetup.SlideHeight", "throws TypeError if char_number > 61695: #for higher numbers (f0ff", "Standardposition.\", children=[ bkt.ribbon.Button( label=\"Benutzerdef. Bereich festlegen\", supertip=\"Der benutzerdefinierte Bereich wird", "set_attr_for_textrange(self, textrange, value): #using textrange instead of textframe! 
if self.attr", "tempfile, os font = item[0] or self.fallback_font img = bkt.ribbon.SymbolsGallery.create_symbol_image(font,", "= True else: my_kwargs[\"round_cm\"] = True my_kwargs[\"convert\"] = \"pt_to_cm\" if", "Drawing.SolidBrush(color) g.FillRectangle(brush, Drawing.Rectangle(round(left),round(top), round(fill_width), round(fill_height))) color = Drawing.ColorTranslator.FromHtml('#ff999999') pen =", "1], 'CONTENT'], [u\"1/3 Mitte\", [1./3, 0, 1./3, 1], 'CONTENT'], [u\"1/3", "value = self.get_attr_from_shapes(shapes, selection) if value is None: #e.g. no", "or (self.attr == \"SpaceWithin\" and par_format.LineRuleWithin == 0): self.huge_step =", "height/5 v_ref = (height*4)/5 left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,v_offset,width, v_ref]) else:", "= pressed cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] =", "float, long]: if length_definition < 0: # negative values specify", "-1) #symbol: FontName, CharNumber (decimal), Unicode=True except TypeError: char_inserted =", "return value def on_change(self, shapes, selection, value): self.set_attr_for_shapes(shapes, selection, value)", "return 'Positionierung: ' + item[0] def get_item_supertip(self, index): return 'Verwende", "of settings in clicked element ''' item = self.items[index] position", "g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height/5-1)) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height-1)) return img", "between selected items) with fallback value return self.fallback_value else: return", "0, .5, 1], 'CONTENT'], [u\"1/2 Rechts\", [ .5, 0, .5,", "for InsertSymbol\") #fallback to InsertAfter placeholder_char = textrange.InsertAfter(\"X\") #append placeholder", "bkt.settings.get(\"bkt.symbols.unicode_font\", None) #insert unicode characters as symbol with special font", "reference): if type(length_definition) == list: # allow [150, 50%] 
l", "0 ref_width,ref_height = page_setup.SlideWidth, page_setup.SlideHeight # target size left,top,width,height =", "else: # SLIDE / ABS page_setup = context.presentation.PageSetup ref_left,ref_top =", "round(fill_height))) color = Drawing.ColorTranslator.FromHtml('#ff999999') pen = Drawing.Pen(color,1) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1,", "self.symbols[index] self._add_to_recent(item) shift_or_ctrl = bkt.get_key_state(bkt.KeyCodes.CTRL) or bkt.get_key_state(bkt.KeyCodes.SHIFT) if selection.Type ==", "# ] ) if not \"image\" in kwargs and not", "return char_inserted else: return textrange.InsertAfter(item[1]) #append symbol text # if", "[int, float, long]: if length_definition < 0: # negative values", "bkt.settings[\"bkt.symbols.recent_symbols\"] = cls.recent_symbols @classmethod def switch_unicode_font(cls, font=None): cls.unicode_font = font", "cls.convert_into_bitmap) @classmethod def switch_convert_into_text(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap =", "if index is None: return context.python_addin.load_image(self.items[self.locpin.index][0]) else: return context.python_addin.load_image(self.items[index][0]) class", ".5, 1], 'CONTENT'], [u\"1/2 Rechts\", [ .5, 0, .5, 1],", "(\"fix_locpin_tl\", \"Oben-links\", item_supertip.format(\"oben-links\")), (\"fix_locpin_tm\", \"Oben-mitte\", item_supertip.format(\"oben-mitte\")), (\"fix_locpin_tr\", \"Oben-rechts\", item_supertip.format(\"oben-rechts\")), (\"fix_locpin_ml\",", "value as in the first paragraph textrange.ParagraphFormat.LineRuleAfter = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter if", "selected try: # produces error if no text is selected", "pen/brush height = 40 width = height*16./9 img = Drawing.Bitmap(width,", "or pplib.GlobalLocPin self.items = [ (\"fix_locpin_tl\", \"Oben-links\", item_supertip.format(\"oben-links\")), (\"fix_locpin_tm\", \"Oben-mitte\",", "\"Unten-mitte\", item_supertip.format(\"unten-mitte\")), (\"fix_locpin_br\", 
\"Unten-rechts\", item_supertip.format(\"unten-rechts\")), ] my_kwargs = dict( #", "g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height-1)) return img def rect_from_definition(self, pos_definition, ref_frame=[0,0,640,480]):", "return reference - self.length_from_definition(-length_definition, reference) elif length_definition <= 1: #", "index, context, selection, **kwargs): ''' create numberd shape according of", "0, 2./3, 1], 'CONTENT'], [u\"2/3 Rechts\", [1./3, 0, 2./3, 1],", "self.symbols[index] is not None except: return False def get_index_as_button(self, index):", "Unicode) @classmethod def add_to_recent(cls, item): try: #try to remove if", "bkt.ribbon.Button( id=\"{}_button_{}\".format(self.id, index), get_label=bkt.Callback(lambda: self.button_get_label(index)), on_action=bkt.Callback(lambda context, selection: self.on_action_indexed(None, index,", "symbol). only way to replace these values and correctly show", "first paragraph textrange.ParagraphFormat.LineRuleAfter = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter if self.attr == \"SpaceWithin\" and", "#need to use TextRange2 as TextRange does not contain LeftIndent,", "shape.TextFrame2.TextRange.Font.Size shape.TextFrame2.TextRange.Font.Size = 60 shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible = 0 new_shape = pplib.convert_text_into_shape(shape)", "selected self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection), item) else: #convert into shape or bitmap if", "self.symbols[index][2] except: return \"Zuletzt verwendet: Undefined\" def button_get_visible(self, index): try:", "width, height) if 'on_position_change' in self._callbacks: if context: return context.invoke_callback(self._callbacks['on_position_change'],", "Shapes auf eine Standardposition.\", children=[ bkt.ribbon.Button( label=\"Benutzerdef. 
Bereich festlegen\", supertip=\"Der", "= 'SpaceBefore' def __init__(self, **kwargs): ''' attr examples: SpaceBefore, SpaceAfter,", "''' Created on 02.11.2017 @author: fstallmann ''' from __future__ import", "item_supertip.format(\"mitte-rechts\")), (\"fix_locpin_bl\", \"Unten-links\", item_supertip.format(\"unten-links\")), (\"fix_locpin_bm\", \"Unten-mitte\", item_supertip.format(\"unten-mitte\")), (\"fix_locpin_br\", \"Unten-rechts\", item_supertip.format(\"unten-rechts\")),", "\"FirstLineIndent\": #FirstLineIndent can be negative! value = max(0,value) if selection.Type", "return context.python_addin.load_image(self.items[self.locpin.index][0]) else: return context.python_addin.load_image(self.items[index][0]) class PositionGallery(bkt.ribbon.Gallery): # items: [label,", "super(ParagraphFormatSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks ### def get_text(self, shapes,", "except: # produces error for certain chart types, e.g. Treemap", "item[0] or PPTSymbolsSettings.unicode_font is not None: #font name is given,", "bkt.get_key_state(bkt.KeyCodes.SHIFT) class PPTSymbolsGallery(bkt.ribbon.SymbolsGallery): @property def fallback_font(self): return PPTSymbolsSettings.unicode_font or bkt.ribbon.SymbolsGallery.fallback_font", "replace these values and correctly show icon is setting it", "= max(0,value) if selection.Type == 3: # text selected self.set_attr_for_textrange(selection.TextRange2,", "1./6], 'CONTENT'] ] def __init__(self, positions=None, label=\"Standardpositionen\", columns=3, **kwargs): self.items", "0) or (self.attr == \"SpaceAfter\" and par_format.LineRuleAfter == 0) or", "def switch_convert_into_bitmap(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap = pressed bkt.settings[\"bkt.symbols.convert_into_shape\"]", "specified item ''' # retrieve item-settings item = self.items[index] return", "values return length_definition else: return 10 ## userdefined area def", "these values and correctly show icon is setting it 
to", "left /ref_width * width fill_width = fill_width /ref_width * width", "'CONTENT'], [u\"1/2 Mitte\", [.25, 0, .5, 1], 'CONTENT'], [u\"1/2 Rechts\",", "of settings in clicked element ''' item = self.symbols[index] self._add_to_recent(item)", "before (it remains the font of the symbol). only way", "differ between selected items) with fallback value return self.fallback_value else:", "bkt from bkt import dotnet Drawing = dotnet.import_drawing() from .", "get_convert_into_bitmap(cls): return (cls.convert_into_bitmap or bkt.get_key_state(bkt.KeyCodes.CTRL)) and not bkt.get_key_state(bkt.KeyCodes.SHIFT) class PPTSymbolsGallery(bkt.ribbon.SymbolsGallery):", "and par_format.LineRuleAfter == 0) or (self.attr == \"SpaceWithin\" and par_format.LineRuleWithin", "for gal_item in self.items # ] ) if not \"image\"", "height] # values can be absolute or percentage # reference:", "symbol with special font (e.g. Arial Unicode) @classmethod def add_to_recent(cls,", "my_kwargs = dict( size_string = '###', round_cm = True, convert", "selected self.set_attr_for_textrange(selection.TextRange2, value) #need to use TextRange2 as TextRange does", ".5, 1], 'CONTENT'], [u\"1/3 Links\", [ 0, 0, 1./3, 1],", "ref_width,ref_height = page_setup.SlideWidth, page_setup.SlideHeight # target size left,top,width,height = self.rect_from_definition(position,", "bkt.get_key_state(bkt.KeyCodes.CTRL) @classmethod def switch_convert_into_bitmap(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap =", "self.length_from_definition(pos_definition[3], ref_frame[3]) return left, top, width, height def length_from_definition(self, length_definition,", "def insert_symbol_into_text(self, textrange, item): if item[0] or PPTSymbolsSettings.unicode_font is not", "width-1, height/5-1)) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height-1)) return img def rect_from_definition(self,", "PPTSymbolsSettings.recent_symbols @symbols.setter def symbols(self, value): pass def get_item_image(self, 
index): try:", "as in the first paragraph textrange.ParagraphFormat.LineRuleWithin = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin setattr(textrange.ParagraphFormat, self.attr,", "def _get_attr(self, par_format): if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: if", "200) #FileName, LinkToFile, SaveWithDocument, Left, Top shape.select() os.remove(tmpfile) class PPTSymbolsGalleryRecent(PPTSymbolsGallery):", "#shapes selected self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection), item) else: #convert into shape or bitmap", "# self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) def create_symbol_shape(self, slide, item): shape = slide.shapes.addTextbox(", "TypeError(\"character number to large for InsertSymbol\") #fallback to InsertAfter placeholder_char", "not (cls.convert_into_shape or cls.convert_into_bitmap) @classmethod def switch_convert_into_text(cls, pressed): cls.convert_into_shape =", "on_action_indexed(self, selected_item, index, context, **kwargs): ''' reposition shapes according of", "if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: if (self.attr == \"SpaceBefore\"", "self.items[index] return self.create_image(item[1], item[2], presentation) def get_item_screentip(self, index): # retrieve", "= bkt.Callback(lambda: self.locpin.index), get_item_count = bkt.Callback(lambda: len(self.items)), get_item_label = bkt.Callback(lambda", "@property def symbols(self): return PPTSymbolsSettings.recent_symbols @symbols.setter def symbols(self, value): pass", "cls.convert_into_shape = False cls.convert_into_bitmap = pressed bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"]", "self.round_at = 0 else: self.huge_step = 0.5 self.big_step = 0.2", "= cls.convert_into_bitmap @classmethod def switch_convert_into_shape(cls, pressed): cls.convert_into_shape = pressed cls.convert_into_bitmap", "pressed): cls.convert_into_shape = 
False cls.convert_into_bitmap = pressed bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape", "elif length_definition <= 1: # percentage values return reference *", "= self.get_attr_from_shapes(shapes, selection) if value is None: #e.g. no textframe", "bkt.ribbon.Button( label=\"Benutzerdef. Bereich festlegen\", supertip=\"Der benutzerdefinierte Bereich wird anhand des", "# retrieve item-settings item = self.items[index] return self.create_image(item[1], item[2], presentation)", "Shapes festgelegt. Dieser Bereich ist anschließend über die Gallery wählbar", "textrange.ParagraphFormat.LineRuleWithin = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin setattr(textrange.ParagraphFormat, self.attr, value) class PPTSymbolsSettings(object): recent_symbols =", "with special font (e.g. Arial Unicode) @classmethod def add_to_recent(cls, item):", "self.button_get_visible(index)), ) class LocpinGallery(bkt.ribbon.Gallery): def __init__(self, locpin=None, item_supertip=\"Shape-Fixpunkt bzw. 
Fixierung", "**kwargs): self.items = positions or PositionGallery.items super(PositionGallery, self).__init__( label =", "* height color = Drawing.ColorTranslator.FromHtml('#ffdd0000') brush = Drawing.SolidBrush(color) g.FillRectangle(brush, Drawing.Rectangle(round(left),round(top),", "locpin_on_action_indexed(self, selected_item, index): self.locpin.index = index def locpin_get_image(self, context, index=None):", "Set attr for shapes ''' value = max(0,value) for textframe", "value) def set_attr_for_textrange(self, textrange, value): #using textrange instead of textframe!", "'label_' + item['label'])[index%self.columns] def get_item_image(self, index, presentation): ''' creates an", "return super(PPTSymbolsGalleryRecent, self).get_item_image(index) except: return super(PPTSymbolsGalleryRecent, self).create_symbol_image(\"Arial\", \"?\") def button_get_label(self,", "61695: #for higher numbers (f0ff works, f100 doesnt work) InsertSymbol", "0, 2./3, 1], 'CONTENT'], [u\"1/2 Links\", [ 0, 0, .5,", "values differ, set the same value as in the first", "types, e.g. 
Treemap continue try: if int(value) == -2147483648: #different", "1, 1], 'CONTENT'], [u\"2/3 Links\", [ 0, 0, 2./3, 1],", "False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def switch_convert_into_shape(cls,", "according to settings in the specified item ''' # retrieve", "0, -1, 200, 200) #FileName, LinkToFile, SaveWithDocument, Left, Top shape.select()", "'CONTENT'], [u\"1/2 Links\", [ 0, 0, .5, 1], 'CONTENT'], [u\"1/2", "1], 'CONTENT'], [u\"1/6 Oben\", [ 0, 0, 1, 1./6], 'CONTENT'],", "pplib.GlobalLocPin self.items = [ (\"fix_locpin_tl\", \"Oben-links\", item_supertip.format(\"oben-links\")), (\"fix_locpin_tm\", \"Oben-mitte\", item_supertip.format(\"oben-mitte\")),", "and selection.Type == 2 and not shift_or_ctrl: #shapes selected self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection),", "get value from first paragraph value = self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat) except: pass", "height) g = Drawing.Graphics.FromImage(img) # reference size if reference ==", "textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter if self.attr == \"SpaceWithin\" and textrange.ParagraphFormat.LineRuleWithin == -2: #if", "self.locpin = locpin or pplib.GlobalLocPin self.items = [ (\"fix_locpin_tl\", \"Oben-links\",", "= 0 shape.TextFrame2.MarginLeft = 0 shape.TextFrame2.MarginRight = 0 self.insert_symbol_into_text(shape.TextFrame2.TextRange, item)", "bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_bitmap(cls): return", "self._get_attr(textrange.ParagraphFormat) def _get_attr(self, par_format): if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]:", "def create_symbol_bitmap(self, slide, item): import tempfile, os font = item[0]", "item_width=\"24\", show_item_label=False, 
on_action_indexed = bkt.Callback(self.locpin_on_action_indexed), get_selected_item_index = bkt.Callback(lambda: self.locpin.index), get_item_count", "RibbonControl attribute handling self.fallback_value = 0 my_kwargs = dict( size_string", "self).__init__( label = label, columns = columns, image_mso='PositionAnchoringGallery', supertip=u\"Positioniere die", "= 1 self.round_at = 0 else: self.huge_step = 0.5 self.big_step", "def convert_into_text(cls): return not (cls.convert_into_shape or cls.convert_into_bitmap) @classmethod def switch_convert_into_text(cls,", "Position/Größe.' def create_image(self, position, reference, presentation): # create bitmap, define", "chart return self._get_attr(selection.TextRange2.ParagraphFormat) except: return None else: # shapes selected", "height) if 'on_position_change' in self._callbacks: if context: return context.invoke_callback(self._callbacks['on_position_change'], target_frame=frame,", "cls.unicode_font @classmethod def convert_into_text(cls): return not (cls.convert_into_shape or cls.convert_into_bitmap) @classmethod", "return False def get_index_as_button(self, index): return bkt.ribbon.Button( id=\"{}_button_{}\".format(self.id, index), get_label=bkt.Callback(lambda:", "etc. 
else: for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textrange(textframe.TextRange, value) def set_attr_for_textrange(self,", "shapes, selection): value = self.get_attr_from_shapes(shapes, selection) if value is None:", "font else SymbolsGallery.fallback_font bkt.settings[\"bkt.symbols.unicode_font\"] = cls.unicode_font @classmethod def convert_into_text(cls): return", "pressed): cls.convert_into_shape = pressed cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape", "negative number (values differ between selected items) with fallback value", "item[0]: # shape.TextFrame.TextRange.Font.Name = item[0] #font name # shape.TextFrame.TextRange.Text =", "self.set_attr_for_textrange(textframe.TextRange, value) def set_attr_for_textrange(self, textrange, value): #using textrange instead of", "locpin=None, item_supertip=\"Shape-Fixpunkt bzw. Fixierung bei Änderung {}\", **kwargs): self.locpin =", "if len(self.items) == 12: self.items.pop() self.items.append([u\"Benutzerdef. Bereich\", [left, top, width,", "shape.TextFrame2.MarginBottom = 0 shape.TextFrame2.MarginTop = 0 shape.TextFrame2.MarginLeft = 0 shape.TextFrame2.MarginRight", "0, 1, 1./6], 'CONTENT'], [u\"1/6 Unten\", [ 0, 5./6, 1,", "error if no text is selected return self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat) except: try:", "shape according of settings in clicked element ''' item =", "chart types, e.g. Treemap continue try: if int(value) == -2147483648:", "Drawing.Imaging.ImageFormat.Png) shape = slide.shapes.AddPicture(tmpfile, 0, -1, 200, 200) #FileName, LinkToFile,", "types, e.g. Treemap continue return None def get_attr_from_textframe(self, textframe): return", "self.items = positions or PositionGallery.items super(PositionGallery, self).__init__( label = label,", "attribute handling self.fallback_value = 0 my_kwargs = dict( size_string =", "LeftIndent, etc. 
else: for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textrange(textframe.TextRange, value) def", "presentation): self.init_userdefined_area_item(presentation) return len(self.items) # def get_enabled(self, shapes): # return", "in self.items # ] ) if not \"image\" in kwargs", "size left,top,width,height = self.rect_from_definition(position, ref_frame=[ref_left,ref_top,ref_width, ref_height]) frame = pplib.BoundingFrame.from_rect(left, top,", "cls.convert_into_bitmap = pressed bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod", "and textrange.ParagraphFormat.LineRuleWithin == -2: #if values differ, set the same", "= 1 return getattr(par_format, self.attr) ### Setter methods ### def", "symbol text #so, NameFarEast and NameComplexScript should be writable, but", "== 'CONTENT': ref_left,ref_top,ref_width,ref_height = pplib.slide_content_size(context.slide) else: # SLIDE / ABS", "gal_item in self.items # ] ) if not \"image\" in", "not bkt.get_key_state(bkt.KeyCodes.CTRL) @classmethod def switch_convert_into_bitmap(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap", "textrange.InsertAfter(\"X\") #append placeholder symbol so that InsertSymbol behaves the same", "textrange.ParagraphFormat.LineRuleAfter = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter if self.attr == \"SpaceWithin\" and textrange.ParagraphFormat.LineRuleWithin ==", "0, 1./3, 1], 'CONTENT'], [u\"1/6 Oben\", [ 0, 0, 1,", "index): # retrieve item-settings item = self.items[index] return 'Positionierung: '", "kwargs and not \"image_mso\" in kwargs: my_kwargs[\"get_image\"] = bkt.Callback(self.locpin_get_image, context=True)", "* width top = top /ref_height * height fill_height =", "item_supertip.format(\"mitte-links\")), (\"fix_locpin_mm\", \"Mitte-mitte\", item_supertip.format(\"mitte-mitte\")), (\"fix_locpin_mr\", \"Mitte-rechts\", 
item_supertip.format(\"mitte-rechts\")), (\"fix_locpin_bl\", \"Unten-links\", item_supertip.format(\"unten-links\")),", "else: self.huge_step = 0.5 self.big_step = 0.2 self.small_step = 0.1", "ABS ref_width,ref_height = presentation.PageSetup.SlideWidth, presentation.PageSetup.SlideHeight left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,0,ref_width, ref_height])", "Rechts\", [1./3, 0, 2./3, 1], 'CONTENT'], [u\"1/2 Links\", [ 0,", "self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: my_kwargs[\"round_pt\"] = True else: my_kwargs[\"round_cm\"]", "(decimal), Unicode=True except TypeError: char_inserted = textrange.InsertAfter(item[1]) #append symbol text", "Drawing.Rectangle(0,0, width-1, height/5-1)) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height-1)) return img def", "for certain chart types, e.g. Treemap continue try: if int(value)", "PPTSymbolsSettings.unicode_font or bkt.ribbon.SymbolsGallery.fallback_font def on_action_indexed(self, selected_item, index, context, selection, **kwargs):", "produces error if there is no textrange, e.g. 
selection within", "create_symbol_shape(self, slide, item): shape = slide.shapes.addTextbox( #office.MsoAutoShapeType.msoShapeRectangle.value__, 1, 100,100,200,200) shape.TextFrame2.WordWrap", "attr for shapes ''' for textframe in pplib.iterate_shape_textframes(shapes): try: return", "selection.Type == 2 and not shift_or_ctrl: #shapes selected self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection), item)", "return textrange.InsertAfter(item[1]) #append symbol text # if item[0]: # char_inserted.Font.Name", "locpin or pplib.GlobalLocPin self.items = [ (\"fix_locpin_tl\", \"Oben-links\", item_supertip.format(\"oben-links\")), (\"fix_locpin_tm\",", "and not shift_or_ctrl: #shapes selected self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection), item) else: #convert into", "font #font name return char_inserted else: return textrange.InsertAfter(item[1]) #append symbol", "get_item_screentip = bkt.Callback(lambda index: self.items[index][1]), get_item_supertip = bkt.Callback(lambda index: self.items[index][2]),", "= deque(bkt.settings.get(\"bkt.symbols.recent_symbols\", []), maxlen=3) convert_into_shape = bkt.settings.get(\"bkt.symbols.convert_into_shape\", True) #always convert", "self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat) except: pass return value return None def get_attr_from_textrange(self, textrange):", "g.FillRectangle(brush, Drawing.Rectangle(round(left),round(top), round(fill_width), round(fill_height))) color = Drawing.ColorTranslator.FromHtml('#ff999999') pen = Drawing.Pen(color,1)", "shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible = 0 new_shape = pplib.convert_text_into_shape(shape) new_shape.TextFrame2.TextRange.Font.Size = orig_fontsize except:", "if self.attr == \"SpaceAfter\" and textrange.ParagraphFormat.LineRuleAfter == -2: #if values", "shape.TextFrame2.WordWrap = 0 shape.TextFrame2.AutoSize = 1 #ppAutoSizeShapeToFitText shape.TextFrame2.MarginBottom = 0", "@symbols.setter def 
symbols(self, value): pass def get_item_image(self, index): try: return", "= 0 shape.TextFrame2.AutoSize = 1 #ppAutoSizeShapeToFitText shape.TextFrame2.MarginBottom = 0 shape.TextFrame2.MarginTop", "(\"fix_locpin_tm\", \"Oben-mitte\", item_supertip.format(\"oben-mitte\")), (\"fix_locpin_tr\", \"Oben-rechts\", item_supertip.format(\"oben-rechts\")), (\"fix_locpin_ml\", \"Mitte-links\", item_supertip.format(\"mitte-links\")), (\"fix_locpin_mm\",", "LocpinGallery(bkt.ribbon.Gallery): def __init__(self, locpin=None, item_supertip=\"Shape-Fixpunkt bzw. Fixierung bei Änderung {}\",", "return \"Zuletzt verwendet: Undefined\" def button_get_visible(self, index): try: return self.symbols[index]", "char_inserted.Font.NameFarEast = \"+mn-ea\" char_inserted.Font.NameComplexScript = \"+mn-cs\" char_inserted.Font.Name = font #font", "index, presentation): ''' creates an item image with target area", "bkt.settings.get(\"bkt.symbols.convert_into_bitmap\", False) #always convert newly inserted symbols into bitmap picture", "char_inserted.Font.NameComplexScript = \"+mn-cs\" char_inserted.Font.Name = font #font name return char_inserted", "index: self.items[index][1]), get_item_supertip = bkt.Callback(lambda index: self.items[index][2]), # children =", "der aktuellen Prästentation vorgehalten.\", on_action=bkt.Callback(self.set_userdefined_area), get_enabled = bkt.get_enabled_auto ) ],", "item): import tempfile, os font = item[0] or self.fallback_font img", "label=\"Benutzerdef. 
Bereich festlegen\", supertip=\"Der benutzerdefinierte Bereich wird anhand des gewählten", "bitmap if PPTSymbolsSettings.get_convert_into_bitmap(): self.create_symbol_bitmap(selection.SlideRange(1), item) else: self.create_symbol_shape(selection.SlideRange(1), item) def _add_to_recent(self,", "item = self.symbols[index] self._add_to_recent(item) shift_or_ctrl = bkt.get_key_state(bkt.KeyCodes.CTRL) or bkt.get_key_state(bkt.KeyCodes.SHIFT) if", "my_kwargs[\"convert\"] = \"pt_to_cm\" if self.attr in [\"LeftIndent\", \"FirstLineIndent\"]: my_kwargs[\"big_step\"] =", "= 0 my_kwargs = dict( size_string = '###', round_cm =", "paragraph, so get value from first paragraph value = self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat)", "# retrieve item-settings item = self.items[index] return 'Positionierung: ' +", "symbols(self): return PPTSymbolsSettings.recent_symbols @symbols.setter def symbols(self, value): pass def get_item_image(self,", "= bkt.Callback(self.locpin_on_action_indexed), get_selected_item_index = bkt.Callback(lambda: self.locpin.index), get_item_count = bkt.Callback(lambda: len(self.items)),", "ABS page_setup = context.presentation.PageSetup ref_left,ref_top = 0, 0 ref_width,ref_height =", "if already exists and add to beginning cls.recent_symbols.remove(item) cls.recent_symbols.append(item) except", "class LocpinGallery(bkt.ribbon.Gallery): def __init__(self, locpin=None, item_supertip=\"Shape-Fixpunkt bzw. Fixierung bei Änderung", "is None: #e.g. 
no textframe detected return None elif int(value)", "self.get_item_image(index)), get_visible=bkt.Callback(lambda: self.button_get_visible(index)), ) class LocpinGallery(bkt.ribbon.Gallery): def __init__(self, locpin=None, item_supertip=\"Shape-Fixpunkt", "**kwargs): ''' attr examples: MarginTop, MarginBottom, MarginLeft, MarginRight ''' #self.attr", "locpin_get_image(self, context, index=None): if index is None: return context.python_addin.load_image(self.items[self.locpin.index][0]) else:", "shapes selected for textframe in pplib.iterate_shape_textframes(shapes): try: value = self.get_attr_from_textrange(textframe.TextRange)", "item_supertip=\"Shape-Fixpunkt bzw. Fixierung bei Änderung {}\", **kwargs): self.locpin = locpin", "3: # text selected self.set_attr_for_textrange(selection.TextRange2, value) #need to use TextRange2", "cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_shape(cls): return (cls.convert_into_shape or", "pplib.BoundingFrame.from_rect(left, top, width, height) if 'on_position_change' in self._callbacks: if context:", "shape.select() else: new_shape.select() else: shape.select() def create_symbol_bitmap(self, slide, item): import", "shapes[0]) else: frame = pplib.BoundingFrame.from_shapes(shapes) pplib.ContentArea.define_contentarea(presentation, frame) self.init_userdefined_area_item(presentation) def init_userdefined_area_item(self,", "= slide.shapes.AddPicture(tmpfile, 0, -1, 200, 200) #FileName, LinkToFile, SaveWithDocument, Left,", "bkt.get_key_state(bkt.KeyCodes.SHIFT) if selection.Type == 3 and not shift_or_ctrl: #text selected", "the default ppt symbol-picker only shows unicode chars til f0ff.", "in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: my_kwargs[\"round_pt\"] = True else: my_kwargs[\"round_cm\"] =", "0.125 my_kwargs.update(kwargs) super(ParagraphFormatSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks ### def", "ref_height]) 
frame = pplib.BoundingFrame.from_rect(left, top, width, height) if 'on_position_change' in", "initialization attr = 'MarginTop' def __init__(self, **kwargs): ''' attr examples:", "= bkt.get_key_state(bkt.KeyCodes.CTRL) or bkt.get_key_state(bkt.KeyCodes.SHIFT) if selection.Type == 3 and not", "reference: CONTENTE / SLIDE / ABS # values are converted", "= \"pt_to_cm\" if self.attr in [\"LeftIndent\", \"FirstLineIndent\"]: my_kwargs[\"big_step\"] = 0.25", "0): self.huge_step = 10 self.big_step = 3 self.small_step = 1", "default ppt symbol-picker only shows unicode chars til f0ff. raise", "True else: my_kwargs[\"round_cm\"] = True my_kwargs[\"convert\"] = \"pt_to_cm\" if self.attr", "self.small_step = 1 self.round_at = 0 else: self.huge_step = 0.5", "unicode characters as symbol with special font (e.g. Arial Unicode)", "= self.get_attr_from_textrange(textframe.TextRange) except: # produces error for certain chart types,", "item[1]) # reference size if reference == 'CONTENT': ref_left,ref_top,ref_width,ref_height =", "PPTSymbolsSettings.convert_into_text() and selection.Type == 2 and not shift_or_ctrl: #shapes selected", "# produces error if no text is selected return self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat)", "ppt symbol-picker only shows unicode chars til f0ff. 
raise TypeError(\"character", "# allow [150, 50%] l = 0 for ldef in", "def switch_unicode_font(cls, font=None): cls.unicode_font = font #if font else SymbolsGallery.fallback_font", "= 0.125 my_kwargs.update(kwargs) super(ParagraphFormatSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks ###", "insert_symbol_into_shapes(self, shapes, item): #pplib.iterate_shape_textframes(shapes, lambda textframe: self.insert_symbol_into_text(textframe.TextRange, item)) for textframe", "the same as InsertAfter return placeholder_char.InsertSymbol(font, char_number, -1) #symbol: FontName,", "ausgewählten Shapes auf eine Standardposition.\", children=[ bkt.ribbon.Button( label=\"Benutzerdef. Bereich festlegen\",", "0.125 my_kwargs[\"rounding_factor\"] = 0.125 my_kwargs.update(kwargs) super(ParagraphFormatSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box", "produces error if no text is selected return self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat) except:", "== \"SpaceWithin\" and par_format.LineRuleWithin == 0): self.huge_step = 10 self.big_step", "#self.change_position(selection, shapes, item[1]) # reference size if reference == 'CONTENT':", "def create_image(self, position, reference, presentation): # create bitmap, define pen/brush", "shapes, selection, value): self.set_attr_for_shapes(shapes, selection, value) ### Getter Methods ###", "'CONTENT'], [u\"1/6 Unten\", [ 0, 5./6, 1, 1./6], 'CONTENT'] ]", "(self.attr == \"SpaceWithin\" and par_format.LineRuleWithin == 0): self.huge_step = 10", "self.insert_symbol_into_text(selection.TextRange2, item) elif PPTSymbolsSettings.convert_into_text() and selection.Type == 2 and not", "inserted symbols into shapes convert_into_bitmap = bkt.settings.get(\"bkt.symbols.convert_into_bitmap\", False) #always convert", "__init__(self, **kwargs): ''' attr examples: MarginTop, MarginBottom, MarginLeft, MarginRight '''", "char_inserted.Font.Name = item[0] #font name def insert_symbol_into_shapes(self, shapes, 
item): #pplib.iterate_shape_textframes(shapes,", "[u\"1/6 Oben\", [ 0, 0, 1, 1./6], 'CONTENT'], [u\"1/6 Unten\",", "Get attr for shapes ''' for textframe in pplib.iterate_shape_textframes(shapes): try:", "text selected try: # produces error if no text is", "def set_userdefined_area(self, presentation, shapes): if len(shapes) == 1: pplib.ContentArea.define_contentarea(presentation, shapes[0])", "setting it to '+mn-..' char_inserted.Font.NameFarEast = \"+mn-ea\" char_inserted.Font.NameComplexScript = \"+mn-cs\"", "def get_attr_from_shapes(self, shapes, selection): ''' Get attr for shapes '''", "shapes: # if shape.HasTextFrame == -1: # self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) def", "context.python_addin.load_image(self.items[index][0]) class PositionGallery(bkt.ribbon.Gallery): # items: [label, position, reference] # position:", "bkt.apps.ppt_selection_contains_textframe, ) my_kwargs.update(kwargs) super(TextframeSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks ###", "False cls.convert_into_bitmap = pressed bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap", "def get_item_image(self, index, presentation): ''' creates an item image with", "item = self.items[index] return self.create_image(item[1], item[2], presentation) def get_item_screentip(self, index):", "+ item['label'])[index%self.columns] def get_item_image(self, index, presentation): ''' creates an item", "is automatically set through RibbonControl attribute handling self.fallback_value = 0", "pplib.slide_content_size(context.slide) else: # SLIDE / ABS page_setup = context.presentation.PageSetup ref_left,ref_top", "if selection.Type == 3 and not shift_or_ctrl: #text selected selection.TextRange2.Text", "as pplib class TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr = 'MarginTop'", "def get_convert_into_bitmap(cls): return 
(cls.convert_into_bitmap or bkt.get_key_state(bkt.KeyCodes.CTRL)) and not bkt.get_key_state(bkt.KeyCodes.SHIFT) class", "# text selected self.set_attr_for_textrange(selection.TextRange2, value) #need to use TextRange2 as", "= 0 self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) # if item[0]: # shape.TextFrame.TextRange.Font.Name =", "0, .5, 1], 'CONTENT'], [u\"1/2 Mitte\", [.25, 0, .5, 1],", "SpaceBefore, SpaceAfter, LeftIndent, FirstLineIndent, LineSpacing ''' #self.attr is automatically set", "#FirstLineIndent can be negative! value = max(0,value) if selection.Type ==", "wird anhand des gewählten Shapes festgelegt. Dieser Bereich ist anschließend", "MarginTop, MarginBottom, MarginLeft, MarginRight ''' #self.attr is automatically set through", "== 0) or (self.attr == \"SpaceWithin\" and par_format.LineRuleWithin == 0):", "on_action=bkt.Callback(self.set_userdefined_area), get_enabled = bkt.get_enabled_auto ) ], **kwargs ) def on_action_indexed(self,", "@classmethod def add_to_recent(cls, item): try: #try to remove if already", "to '+mn-..' char_inserted.Font.NameFarEast = \"+mn-ea\" char_inserted.Font.NameComplexScript = \"+mn-cs\" char_inserted.Font.Name =", "remains the font of the symbol). 
only way to replace", "def get_item_label(self, index): # item = self.items[index] # return \"%s\"", "item-settings item = self.items[index] return self.create_image(item[1], item[2], presentation) def get_item_screentip(self,", "textrange): return self._get_attr(textrange.ParagraphFormat) def _get_attr(self, par_format): if self.attr in [\"SpaceBefore\",", "if reference == 'CONTENT': ref_left,ref_top,ref_width,ref_height = pplib.slide_content_size(context.slide) else: # SLIDE", "self.fallback_value = 0 my_kwargs = dict( size_string = '###', round_cm", "ABS # values are converted according to reference items =", "slide, item): shape = slide.shapes.addTextbox( #office.MsoAutoShapeType.msoShapeRectangle.value__, 1, 100,100,200,200) shape.TextFrame2.WordWrap =", "#text selected selection.TextRange2.Text = \"\" #remove selected text first and", "shapes, selection, value): ''' Set attr for shapes ''' value", "get_selected_item_index = bkt.Callback(lambda: self.locpin.index), get_item_count = bkt.Callback(lambda: len(self.items)), get_item_label =", "= 'pt_to_cm', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) my_kwargs.update(kwargs) super(TextframeSpinnerBox, self).__init__(**my_kwargs) ###", "shapes, selection): if selection.Type == 3: # text selected try:", "def __init__(self, locpin=None, item_supertip=\"Shape-Fixpunkt bzw. 
Fixierung bei Änderung {}\", **kwargs):", "screentip=gal_item[1], supertip=gal_item[2]) # for gal_item in self.items # ] )", "par_format.LineRuleAfter == 0) or (self.attr == \"SpaceWithin\" and par_format.LineRuleWithin ==", "value): self.set_attr_for_shapes(shapes, selection, value) ### Getter Methods ### def get_attr_from_shapes(self,", "def symbols(self, value): pass def get_item_image(self, index): try: return super(PPTSymbolsGalleryRecent,", "settings in clicked element ''' item = self.symbols[index] self._add_to_recent(item) shift_or_ctrl", "are not if InsertSymbol is used before (it remains the", "list: # allow [150, 50%] l = 0 for ldef", "max(0,value) if selection.Type == 3: # text selected self.set_attr_for_textrange(selection.TextRange2, value)", "# position: [left, top, width, height] # values can be", "and par_format.LineRuleBefore == 0) or (self.attr == \"SpaceAfter\" and par_format.LineRuleAfter", "= item[0] or self.fallback_font img = bkt.ribbon.SymbolsGallery.create_symbol_image(font, item[1], 400, None)", "verwendet: Undefined\" def button_get_visible(self, index): try: return self.symbols[index] is not", "= textrange.InsertAfter(\"X\") #append placeholder symbol so that InsertSymbol behaves the", "1./3, 1], 'CONTENT'], [u\"1/3 Mitte\", [1./3, 0, 1./3, 1], 'CONTENT'],", "\"SpaceAfter\" and textrange.ParagraphFormat.LineRuleAfter == -2: #if values differ, set the", "0, 1, 1], 'CONTENT'], [u\"2/3 Links\", [ 0, 0, 2./3,", "and textrange.ParagraphFormat.LineRuleAfter == -2: #if values differ, set the same", "ord(item[1]) #ord does not work for higher level unicode, e.g.", "\"Unten-rechts\", item_supertip.format(\"unten-rechts\")), ] my_kwargs = dict( # get_enabled=bkt.apps.ppt_shapes_or_text_selected, columns=\"3\", item_height=\"24\",", "self.attr == \"SpaceBefore\" and textrange.ParagraphFormat.LineRuleBefore == -2: #if values differ,", "not \"image_mso\" in kwargs: my_kwargs[\"get_image\"] = bkt.Callback(self.locpin_get_image, context=True) 
my_kwargs.update(kwargs) super(LocpinGallery,", "(it remains the font of the symbol). only way to", "v_offset = height/5 v_ref = (height*4)/5 left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,v_offset,width,", "into bitmap picture unicode_font = bkt.settings.get(\"bkt.symbols.unicode_font\", None) #insert unicode characters", "insert symbol self.insert_symbol_into_text(selection.TextRange2, item) elif PPTSymbolsSettings.convert_into_text() and selection.Type == 2", "= cls.recent_symbols @classmethod def switch_unicode_font(cls, font=None): cls.unicode_font = font #if", "return self.fallback_value else: return value def on_change(self, shapes, selection, value):", "[\"LeftIndent\", \"FirstLineIndent\"]: my_kwargs[\"big_step\"] = 0.25 my_kwargs[\"small_step\"] = 0.125 my_kwargs[\"rounding_factor\"] =", "value = self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat) except: pass return value return None def", "converted according to reference items = [ [u\"<NAME>\", [ 0,", "InsertAfter return placeholder_char.InsertSymbol(font, char_number, -1) #symbol: FontName, CharNumber (decimal), Unicode=True", "selection.TextRange2.Text = \"\" #remove selected text first and then insert", "== 2 and not shift_or_ctrl: #shapes selected self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection), item) else:", "#append symbol text #so, NameFarEast and NameComplexScript should be writable,", "= item[0] or self.fallback_font try: char_number = ord(item[1]) #ord does", "ref_frame=[0,0,ref_width, ref_height]) left = left /ref_width * width fill_width =", "an item image with target area according to settings in", "shape = slide.shapes.addTextbox( #office.MsoAutoShapeType.msoShapeRectangle.value__, 1, 100,100,200,200) shape.TextFrame2.WordWrap = 0 shape.TextFrame2.AutoSize", "Arial Unicode) @classmethod def add_to_recent(cls, item): try: #try to remove", "value return None def get_attr_from_textrange(self, textrange): return 
self._get_attr(textrange.ParagraphFormat) def _get_attr(self,", "'###', round_cm = True, convert = 'pt_to_cm', get_enabled = bkt.apps.ppt_selection_contains_textframe,", "Bereich festlegen\", supertip=\"Der benutzerdefinierte Bereich wird anhand des gewählten Shapes", "(self.attr == \"SpaceBefore\" and par_format.LineRuleBefore == 0) or (self.attr ==", "item = self.items[index] # return \"%s\" % getattr(NumberedShapes, 'label_' +", ".5, 1], 'CONTENT'], [u\"1/2 Mitte\", [.25, 0, .5, 1], 'CONTENT'],", "area def set_userdefined_area(self, presentation, shapes): if len(shapes) == 1: pplib.ContentArea.define_contentarea(presentation,", "# negative values specify distance 'from right' return reference -", "None) tmpfile = os.path.join(tempfile.gettempdir(), \"bkt-symbol.png\") img.Save(tmpfile, Drawing.Imaging.ImageFormat.Png) shape = slide.shapes.AddPicture(tmpfile,", "Created on 02.11.2017 @author: fstallmann ''' from __future__ import absolute_import", "3 and not shift_or_ctrl: #text selected selection.TextRange2.Text = \"\" #remove", "[ [u\"<NAME>\", [ 0, 0, 1, 1], 'CONTENT'], [u\"2/3 Links\",", "# if item[0]: # char_inserted.Font.Name = item[0] #font name def", "= Drawing.Graphics.FromImage(img) # reference size if reference == 'CONTENT': v_offset", "try: char_number = ord(item[1]) #ord does not work for higher", "top = top /ref_height * height fill_height = fill_height/ref_height *", "class PositionGallery(bkt.ribbon.Gallery): # items: [label, position, reference] # position: [left,", "in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: if (self.attr == \"SpaceBefore\" and par_format.LineRuleBefore", "the font of the symbol). 
only way to replace these", "= textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin setattr(textrange.ParagraphFormat, self.attr, value) class PPTSymbolsSettings(object): recent_symbols = deque(bkt.settings.get(\"bkt.symbols.recent_symbols\",", "[ 0, 5./6, 1, 1./6], 'CONTENT'] ] def __init__(self, positions=None,", "@classmethod def switch_unicode_font(cls, font=None): cls.unicode_font = font #if font else", "= True my_kwargs[\"convert\"] = \"pt_to_cm\" if self.attr in [\"LeftIndent\", \"FirstLineIndent\"]:", "selection within a chart return self._get_attr(selection.TextRange2.ParagraphFormat) except: return None else:", "reference size if reference == 'CONTENT': ref_left,ref_top,ref_width,ref_height = pplib.slide_content_size(context.slide) else:", "chart types, e.g. Treemap continue return None def get_attr_from_textframe(self, textframe):", "pplib.iterate_shape_textframes(shapes): try: return self.get_attr_from_textframe(textframe) except: # produces error for certain", "or PPTSymbolsSettings.unicode_font is not None: #font name is given, then", "def switch_convert_into_text(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"]", "0 self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) # if item[0]: # shape.TextFrame.TextRange.Font.Name = item[0]", "position: [left, top, width, height] # values can be absolute", "placeholder symbol so that InsertSymbol behaves the same as InsertAfter", "item_supertip.format(\"oben-links\")), (\"fix_locpin_tm\", \"Oben-mitte\", item_supertip.format(\"oben-mitte\")), (\"fix_locpin_tr\", \"Oben-rechts\", item_supertip.format(\"oben-rechts\")), (\"fix_locpin_ml\", \"Mitte-links\", item_supertip.format(\"mitte-links\")),", "absolute_import from collections import deque import bkt from bkt import", "## userdefined area def set_userdefined_area(self, presentation, shapes): if len(shapes) ==", "= columns, image_mso='PositionAnchoringGallery', 
supertip=u\"Positioniere die ausgewählten Shapes auf eine Standardposition.\",", "insert_symbol_into_text(self, textrange, item): if item[0] or PPTSymbolsSettings.unicode_font is not None:", "== \"SpaceBefore\" and textrange.ParagraphFormat.LineRuleBefore == -2: #if values differ, set", "class PPTSymbolsGallery(bkt.ribbon.SymbolsGallery): @property def fallback_font(self): return PPTSymbolsSettings.unicode_font or bkt.ribbon.SymbolsGallery.fallback_font def", "text #so, NameFarEast and NameComplexScript should be writable, but they", "# produces error if there is no textrange, e.g. selection", "first paragraph textrange.ParagraphFormat.LineRuleBefore = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore if self.attr == \"SpaceAfter\" and", "def init_userdefined_area_item(self, presentation): #due to performance check first if tag", "attr examples: SpaceBefore, SpaceAfter, LeftIndent, FirstLineIndent, LineSpacing ''' #self.attr is", "shape.TextFrame.TextRange.Text = item[1] #symbol text if PPTSymbolsSettings.get_convert_into_shape(): #convert into shape", "does not work anymore. Also the default ppt symbol-picker only", "= False cls.convert_into_bitmap = pressed bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] =", "for higher level unicode, e.g. emojis, and throws TypeError if", "wählbar und wird dauerhaft in der aktuellen Prästentation vorgehalten.\", on_action=bkt.Callback(self.set_userdefined_area),", "of the symbol). 
only way to replace these values and", "= self.symbols[index] self._add_to_recent(item) shift_or_ctrl = bkt.get_key_state(bkt.KeyCodes.CTRL) or bkt.get_key_state(bkt.KeyCodes.SHIFT) if selection.Type", "retrieve item-settings item = self.items[index] return 'Positionierung: ' + item[0]", "absolute values return length_definition else: return 10 ## userdefined area", "fallback value return self.fallback_value else: return value def on_change(self, shapes,", "inserted symbols into bitmap picture unicode_font = bkt.settings.get(\"bkt.symbols.unicode_font\", None) #insert", "shapes, item[1]) # reference size if reference == 'CONTENT': ref_left,ref_top,ref_width,ref_height", "= 0.125 my_kwargs[\"rounding_factor\"] = 0.125 my_kwargs.update(kwargs) super(ParagraphFormatSpinnerBox, self).__init__(**my_kwargs) ### Spinner", "char_number, -1) #symbol: FontName, CharNumber (decimal), Unicode=True except TypeError: char_inserted", "import helpers as pplib class TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr", "False) #always convert newly inserted symbols into bitmap picture unicode_font", "to large for InsertSymbol\") #fallback to InsertAfter placeholder_char = textrange.InsertAfter(\"X\")", "length_from_definition(self, length_definition, reference): if type(length_definition) == list: # allow [150,", "create numberd shape according of settings in clicked element '''", "create_image(self, position, reference, presentation): # create bitmap, define pen/brush height", "else: #convert into shape or bitmap if PPTSymbolsSettings.get_convert_into_bitmap(): self.create_symbol_bitmap(selection.SlideRange(1), item)", "if type(length_definition) == list: # allow [150, 50%] l =", "supertip=\"Der benutzerdefinierte Bereich wird anhand des gewählten Shapes festgelegt. 
Dieser", "def get_item_image(self, index): try: return super(PPTSymbolsGalleryRecent, self).get_item_image(index) except: return super(PPTSymbolsGalleryRecent,", "context.invoke_callback(self._callbacks['on_position_change'], target_frame=frame, **kwargs) def get_item_count(self, presentation): self.init_userdefined_area_item(presentation) return len(self.items) #", "clicked element ''' item = self.items[index] position = item[1] reference", "= '-###', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) if self.attr in [\"SpaceBefore\",", "], **kwargs ) def on_action_indexed(self, selected_item, index, context, **kwargs): '''", "self._add_to_recent(item) shift_or_ctrl = bkt.get_key_state(bkt.KeyCodes.CTRL) or bkt.get_key_state(bkt.KeyCodes.SHIFT) if selection.Type == 3", "same value as in the first paragraph textrange.ParagraphFormat.LineRuleWithin = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin", "or (self.attr == \"SpaceAfter\" and par_format.LineRuleAfter == 0) or (self.attr", "anhand des gewählten Shapes festgelegt. 
Dieser Bereich ist anschließend über", "= bkt.ribbon.SymbolsGallery.create_symbol_image(font, item[1], 400, None) tmpfile = os.path.join(tempfile.gettempdir(), \"bkt-symbol.png\") img.Save(tmpfile,", "handling self.fallback_value = 0 my_kwargs = dict( size_string = '-###',", "= orig_fontsize except: shape.select() else: new_shape.select() else: shape.select() def create_symbol_bitmap(self,", "get_item_image = bkt.Callback(self.locpin_get_image, context=True), get_item_screentip = bkt.Callback(lambda index: self.items[index][1]), get_item_supertip", "### Getter Methods ### def get_attr_from_shapes(self, shapes, selection): if selection.Type", "page_setup = context.presentation.PageSetup ref_left,ref_top = 0, 0 ref_width,ref_height = page_setup.SlideWidth,", "100,100,200,200) shape.TextFrame2.WordWrap = 0 shape.TextFrame2.AutoSize = 1 #ppAutoSizeShapeToFitText shape.TextFrame2.MarginBottom =", "value) class PPTSymbolsSettings(object): recent_symbols = deque(bkt.settings.get(\"bkt.symbols.recent_symbols\", []), maxlen=3) convert_into_shape =", "continue try: if int(value) == -2147483648: #different values for each", "Item(image=gal_item[0], screentip=gal_item[1], supertip=gal_item[2]) # for gal_item in self.items # ]", "index, context, selection)), get_image=bkt.Callback(lambda: self.get_item_image(index)), get_visible=bkt.Callback(lambda: self.button_get_visible(index)), ) class LocpinGallery(bkt.ribbon.Gallery):", ") def on_action_indexed(self, selected_item, index, context, **kwargs): ''' reposition shapes", "reference size if reference == 'CONTENT': v_offset = height/5 v_ref", "return PPTSymbolsSettings.unicode_font or bkt.ribbon.SymbolsGallery.fallback_font def on_action_indexed(self, selected_item, index, context, selection,", "first paragraph value = self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat) except: pass return value return", "not bkt.get_key_state(bkt.KeyCodes.SHIFT) class PPTSymbolsGallery(bkt.ribbon.SymbolsGallery): @property def 
fallback_font(self): return PPTSymbolsSettings.unicode_font or", "\"\" #remove selected text first and then insert symbol self.insert_symbol_into_text(selection.TextRange2,", "fstallmann ''' from __future__ import absolute_import from collections import deque", "characters as symbol with special font (e.g. Arial Unicode) @classmethod", "Treemap continue return None def get_attr_from_textframe(self, textframe): return getattr(textframe, self.attr)", "in self._callbacks: if context: return context.invoke_callback(self._callbacks['on_position_change'], target_frame=frame, **kwargs) def get_item_count(self,", "\"?\") def button_get_label(self, index): try: return self.symbols[index][2] except: return \"Zuletzt", "import tempfile, os font = item[0] or self.fallback_font img =", "presentation): ''' creates an item image with target area according", "get_text(self, shapes, selection): value = self.get_attr_from_shapes(shapes, selection) if value is", "= 1 #ppAutoSizeShapeToFitText shape.TextFrame2.MarginBottom = 0 shape.TextFrame2.MarginTop = 0 shape.TextFrame2.MarginLeft", "# reference size if reference == 'CONTENT': ref_left,ref_top,ref_width,ref_height = pplib.slide_content_size(context.slide)", "self._callbacks: if context: return context.invoke_callback(self._callbacks['on_position_change'], target_frame=frame, **kwargs) def get_item_count(self, presentation):", "value): ''' Set attr for shapes ''' value = max(0,value)", "-2: #if values differ, set the same value as in", "columns = columns, image_mso='PositionAnchoringGallery', supertip=u\"Positioniere die ausgewählten Shapes auf eine", "for each paragraph, so get value from first paragraph value", "index: self.items[index][1]), get_item_image = bkt.Callback(self.locpin_get_image, context=True), get_item_screentip = bkt.Callback(lambda index:", "same value as in the first paragraph textrange.ParagraphFormat.LineRuleAfter = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter", "or percentage # reference: CONTENTE 
/ SLIDE / ABS #", "reposition shapes according of settings in clicked element ''' item", "festgelegt. Dieser Bereich ist anschließend über die Gallery wählbar und", "add_to_recent(cls, item): try: #try to remove if already exists and", "slide, item): import tempfile, os font = item[0] or self.fallback_font", "\"Mitte-rechts\", item_supertip.format(\"mitte-rechts\")), (\"fix_locpin_bl\", \"Unten-links\", item_supertip.format(\"unten-links\")), (\"fix_locpin_bm\", \"Unten-mitte\", item_supertip.format(\"unten-mitte\")), (\"fix_locpin_br\", \"Unten-rechts\",", "def get_item_screentip(self, index): # retrieve item-settings item = self.items[index] return", "differ, set the same value as in the first paragraph", "only shows unicode chars til f0ff. raise TypeError(\"character number to", "detected return None elif int(value) == -2147483648: #replace large negative", "shapes according of settings in clicked element ''' item =", "@classmethod def switch_convert_into_bitmap(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap = pressed", "type(length_definition) in [int, float, long]: if length_definition < 0: #", "InsertSymbol is used before (it remains the font of the", "ref_height]) left = left /ref_width * width fill_width = fill_width", "to use TextRange2 as TextRange does not contain LeftIndent, etc.", "= 10 self.big_step = 3 self.small_step = 1 self.round_at =", "get_item_screentip(self, index): # retrieve item-settings item = self.items[index] return 'Positionierung:", "create bitmap, define pen/brush height = 40 width = height*16./9", "value) #need to use TextRange2 as TextRange does not contain", "get_item_image(self, index, presentation): ''' creates an item image with target", "= label, columns = columns, image_mso='PositionAnchoringGallery', supertip=u\"Positioniere die ausgewählten Shapes", "#if font else SymbolsGallery.fallback_font bkt.settings[\"bkt.symbols.unicode_font\"] = cls.unicode_font @classmethod def convert_into_text(cls):", 
"'SpaceBefore' def __init__(self, **kwargs): ''' attr examples: SpaceBefore, SpaceAfter, LeftIndent,", "self.round_at = 1 return getattr(par_format, self.attr) ### Setter methods ###", "ref_frame[2]) height = self.length_from_definition(pos_definition[3], ref_frame[3]) return left, top, width, height", "l = 0 for ldef in length_definition: l += self.length_from_definition(ldef,", "== 'CONTENT': v_offset = height/5 v_ref = (height*4)/5 left,top,fill_width,fill_height =", "#append symbol text # if item[0]: # char_inserted.Font.Name = item[0]", "e.g. Treemap continue try: if int(value) == -2147483648: #different values", "index=None): if index is None: return context.python_addin.load_image(self.items[self.locpin.index][0]) else: return context.python_addin.load_image(self.items[index][0])", "0, 0, 1, 1], 'CONTENT'], [u\"2/3 Links\", [ 0, 0,", "check first if tag exists at all if pplib.ContentArea.isset_contentarea(presentation): left,", "\"+mn-cs\" char_inserted.Font.Name = font #font name return char_inserted else: return", "pplib.ContentArea.define_contentarea(presentation, frame) self.init_userdefined_area_item(presentation) def init_userdefined_area_item(self, presentation): #due to performance check", "def on_action_indexed(self, selected_item, index, context, **kwargs): ''' reposition shapes according", "= pplib.convert_text_into_shape(shape) new_shape.TextFrame2.TextRange.Font.Size = orig_fontsize except: shape.select() else: new_shape.select() else:", "= max(0,value) for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textframe(textframe, value) def set_attr_for_textframe(self,", "'CONTENT'], [u\"2/3 Rechts\", [1./3, 0, 2./3, 1], 'CONTENT'], [u\"1/2 Links\",", "= 0.2 self.small_step = 0.1 self.round_at = 1 return getattr(par_format,", "\"Mitte-links\", item_supertip.format(\"mitte-links\")), (\"fix_locpin_mm\", \"Mitte-mitte\", item_supertip.format(\"mitte-mitte\")), (\"fix_locpin_mr\", \"Mitte-rechts\", 
item_supertip.format(\"mitte-rechts\")), (\"fix_locpin_bl\", \"Unten-links\",", "frame = pplib.BoundingFrame.from_shapes(shapes) pplib.ContentArea.define_contentarea(presentation, frame) self.init_userdefined_area_item(presentation) def init_userdefined_area_item(self, presentation): #due", "img def rect_from_definition(self, pos_definition, ref_frame=[0,0,640,480]): left = self.length_from_definition(pos_definition[0], ref_frame[2]) +", "item_supertip.format(\"mitte-mitte\")), (\"fix_locpin_mr\", \"Mitte-rechts\", item_supertip.format(\"mitte-rechts\")), (\"fix_locpin_bl\", \"Unten-links\", item_supertip.format(\"unten-links\")), (\"fix_locpin_bm\", \"Unten-mitte\", item_supertip.format(\"unten-mitte\")),", "item_supertip.format(\"oben-rechts\")), (\"fix_locpin_ml\", \"Mitte-links\", item_supertip.format(\"mitte-links\")), (\"fix_locpin_mm\", \"Mitte-mitte\", item_supertip.format(\"mitte-mitte\")), (\"fix_locpin_mr\", \"Mitte-rechts\", item_supertip.format(\"mitte-rechts\")),", "width top = top /ref_height * height fill_height = fill_height/ref_height", "== 3: # text selected self.set_attr_for_textrange(selection.TextRange2, value) #need to use", "settings in clicked element ''' item = self.items[index] position =", "my_kwargs.update(kwargs) super(LocpinGallery, self).__init__(**my_kwargs) def locpin_on_action_indexed(self, selected_item, index): self.locpin.index = index", "= Drawing.Pen(color,1) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height/5-1)) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height-1))", "selection, value): ''' Set attr for shapes ''' value =", "contain LeftIndent, etc. else: for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textrange(textframe.TextRange, value)", "unicode_font = bkt.settings.get(\"bkt.symbols.unicode_font\", None) #insert unicode characters as symbol with", "== \"SpaceAfter\" and par_format.LineRuleAfter == 0) or (self.attr == \"SpaceWithin\"", "None: #e.g. 
no textframe detected return None elif int(value) ==", "(\"fix_locpin_bl\", \"Unten-links\", item_supertip.format(\"unten-links\")), (\"fix_locpin_bm\", \"Unten-mitte\", item_supertip.format(\"unten-mitte\")), (\"fix_locpin_br\", \"Unten-rechts\", item_supertip.format(\"unten-rechts\")), ]", "self.init_userdefined_area_item(presentation) def init_userdefined_area_item(self, presentation): #due to performance check first if", "#try to remove if already exists and add to beginning", "name def insert_symbol_into_shapes(self, shapes, item): #pplib.iterate_shape_textframes(shapes, lambda textframe: self.insert_symbol_into_text(textframe.TextRange, item))", "work) InsertSymbol does not work anymore. Also the default ppt", "selected_item, index): self.locpin.index = index def locpin_get_image(self, context, index=None): if", "in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textrange(textframe.TextRange, value) def set_attr_for_textrange(self, textrange, value): #using textrange", "super(PPTSymbolsGalleryRecent, self).create_symbol_image(\"Arial\", \"?\") def button_get_label(self, index): try: return self.symbols[index][2] except:", "is given, then insert as symbol font = item[0] or", "int(value) == -2147483648: #different values for each paragraph, so get", "self.items[index] # return \"%s\" % getattr(NumberedShapes, 'label_' + item['label'])[index%self.columns] def", "if no text is selected return self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat) except: try: #", "TextRange2 as TextRange does not contain LeftIndent, etc. else: for", "textframe! 
if self.attr == \"SpaceBefore\" and textrange.ParagraphFormat.LineRuleBefore == -2: #if", "] ) if not \"image\" in kwargs and not \"image_mso\"", "[ .5, 0, .5, 1], 'CONTENT'], [u\"1/3 Links\", [ 0,", ") ], **kwargs ) def on_action_indexed(self, selected_item, index, context, **kwargs):", "selected text first and then insert symbol self.insert_symbol_into_text(selection.TextRange2, item) elif", "if item[0]: # char_inserted.Font.Name = item[0] #font name def insert_symbol_into_shapes(self,", "item) # for shape in shapes: # if shape.HasTextFrame ==", "self.locpin.index), get_item_count = bkt.Callback(lambda: len(self.items)), get_item_label = bkt.Callback(lambda index: self.items[index][1]),", "context, index=None): if index is None: return context.python_addin.load_image(self.items[self.locpin.index][0]) else: return", "all if pplib.ContentArea.isset_contentarea(presentation): left, top, width, height = pplib.ContentArea.read_contentarea(presentation) if", "for ldef in length_definition: l += self.length_from_definition(ldef, reference) return l", "reference == 'CONTENT': ref_left,ref_top,ref_width,ref_height = pplib.slide_content_size(context.slide) else: # SLIDE /", "special font (e.g. 
Arial Unicode) @classmethod def add_to_recent(cls, item): try:", "cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def switch_convert_into_shape(cls, pressed): cls.convert_into_shape =", "so that InsertSymbol behaves the same as InsertAfter return placeholder_char.InsertSymbol(font,", "\"image_mso\" in kwargs: my_kwargs[\"get_image\"] = bkt.Callback(self.locpin_get_image, context=True) my_kwargs.update(kwargs) super(LocpinGallery, self).__init__(**my_kwargs)", "try: #try to remove if already exists and add to", "pressed cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap", "shape.TextFrame2.MarginRight = 0 self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) # if item[0]: # shape.TextFrame.TextRange.Font.Name", "def __init__(self, **kwargs): ''' attr examples: SpaceBefore, SpaceAfter, LeftIndent, FirstLineIndent,", "= cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_bitmap(cls): return (cls.convert_into_bitmap", "into shape try: orig_fontsize = shape.TextFrame2.TextRange.Font.Size shape.TextFrame2.TextRange.Font.Size = 60 shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible", "writable, but they are not if InsertSymbol is used before", "index): self.locpin.index = index def locpin_get_image(self, context, index=None): if index", "= height/5 v_ref = (height*4)/5 left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,v_offset,width, v_ref])", "already exists and add to beginning cls.recent_symbols.remove(item) cls.recent_symbols.append(item) except ValueError:", "shape in shapes: # if shape.HasTextFrame == -1: # self.insert_symbol_into_text(shape.TextFrame2.TextRange,", "on_change(self, shapes, selection, value): self.set_attr_for_shapes(shapes, 
selection, value) ### Getter Methods", "symbols into bitmap picture unicode_font = bkt.settings.get(\"bkt.symbols.unicode_font\", None) #insert unicode", "unicode chars til f0ff. raise TypeError(\"character number to large for", "[1./3, 0, 1./3, 1], 'CONTENT'], [u\"1/3 Rechts\", [2./3, 0, 1./3,", "len(shapes) == 1: pplib.ContentArea.define_contentarea(presentation, shapes[0]) else: frame = pplib.BoundingFrame.from_shapes(shapes) pplib.ContentArea.define_contentarea(presentation,", "if length_definition < 0: # negative values specify distance 'from", "textrange.ParagraphFormat.LineRuleBefore = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore if self.attr == \"SpaceAfter\" and textrange.ParagraphFormat.LineRuleAfter ==", "and NameComplexScript should be writable, but they are not if", "and not \"image_mso\" in kwargs: my_kwargs[\"get_image\"] = bkt.Callback(self.locpin_get_image, context=True) my_kwargs.update(kwargs)", "len(self.items) == 12: self.items.pop() self.items.append([u\"Benutzerdef. Bereich\", [left, top, width, height],", "Links\", [ 0, 0, .5, 1], 'CONTENT'], [u\"1/2 Mitte\", [.25,", "bkt import dotnet Drawing = dotnet.import_drawing() from . 
import helpers", "can be absolute or percentage # reference: CONTENTE / SLIDE", "# values can be absolute or percentage # reference: CONTENTE", "show_item_label=False, on_action_indexed = bkt.Callback(self.locpin_on_action_indexed), get_selected_item_index = bkt.Callback(lambda: self.locpin.index), get_item_count =", "/ref_width * width top = top /ref_height * height fill_height", "the specified item ''' # retrieve item-settings item = self.items[index]", "shift_or_ctrl = bkt.get_key_state(bkt.KeyCodes.CTRL) or bkt.get_key_state(bkt.KeyCodes.SHIFT) if selection.Type == 3 and", "in der aktuellen Prästentation vorgehalten.\", on_action=bkt.Callback(self.set_userdefined_area), get_enabled = bkt.get_enabled_auto )", "= item[1] reference = item[2] #self.change_position(selection, shapes, item[1]) # reference", "get_visible=bkt.Callback(lambda: self.button_get_visible(index)), ) class LocpinGallery(bkt.ribbon.Gallery): def __init__(self, locpin=None, item_supertip=\"Shape-Fixpunkt bzw.", "then insert symbol self.insert_symbol_into_text(selection.TextRange2, item) elif PPTSymbolsSettings.convert_into_text() and selection.Type ==", "selection): value = self.get_attr_from_shapes(shapes, selection) if value is None: #e.g.", "\"SpaceAfter\", \"SpaceWithin\"]: my_kwargs[\"round_pt\"] = True else: my_kwargs[\"round_cm\"] = True my_kwargs[\"convert\"]", "def switch_convert_into_shape(cls, pressed): cls.convert_into_shape = pressed cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"]", "item) def _add_to_recent(self, item): PPTSymbolsSettings.add_to_recent(item) def insert_symbol_into_text(self, textrange, item): if", "get_attr_from_shapes(self, shapes, selection): if selection.Type == 3: # text selected", "textrange.ParagraphFormat.LineRuleAfter == -2: #if values differ, set the same value", "if reference == 'CONTENT': v_offset = height/5 v_ref = (height*4)/5", "cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] = 
cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod", "position, reference] # position: [left, top, width, height] # values", "\"Oben-links\", item_supertip.format(\"oben-links\")), (\"fix_locpin_tm\", \"Oben-mitte\", item_supertip.format(\"oben-mitte\")), (\"fix_locpin_tr\", \"Oben-rechts\", item_supertip.format(\"oben-rechts\")), (\"fix_locpin_ml\", \"Mitte-links\",", "Setter methods ### def set_attr_for_shapes(self, shapes, selection, value): if self.attr", "they are not if InsertSymbol is used before (it remains", ". import helpers as pplib class TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization", "into shapes convert_into_bitmap = bkt.settings.get(\"bkt.symbols.convert_into_bitmap\", False) #always convert newly inserted", "then insert as symbol font = item[0] or self.fallback_font try:", "value = self.get_attr_from_textrange(textframe.TextRange) except: # produces error for certain chart", "value): if self.attr != \"FirstLineIndent\": #FirstLineIndent can be negative! value", "= Drawing.ColorTranslator.FromHtml('#ff999999') pen = Drawing.Pen(color,1) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height/5-1)) g.DrawRectangle(pen,", "supertip=gal_item[2]) # for gal_item in self.items # ] ) if", "60 shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible = 0 new_shape = pplib.convert_text_into_shape(shape) new_shape.TextFrame2.TextRange.Font.Size = orig_fontsize", "1], 'CONTENT'], [u\"1/2 Mitte\", [.25, 0, .5, 1], 'CONTENT'], [u\"1/2", "[u\"2/3 Links\", [ 0, 0, 2./3, 1], 'CONTENT'], [u\"2/3 Rechts\",", "produces error for certain chart types, e.g. 
Treemap continue try:", "= font #if font else SymbolsGallery.fallback_font bkt.settings[\"bkt.symbols.unicode_font\"] = cls.unicode_font @classmethod", "bkt.ribbon.SymbolsGallery.create_symbol_image(font, item[1], 400, None) tmpfile = os.path.join(tempfile.gettempdir(), \"bkt-symbol.png\") img.Save(tmpfile, Drawing.Imaging.ImageFormat.Png)", "SLIDE / ABS page_setup = context.presentation.PageSetup ref_left,ref_top = 0, 0", "self.attr == \"SpaceAfter\" and textrange.ParagraphFormat.LineRuleAfter == -2: #if values differ,", "context, selection: self.on_action_indexed(None, index, context, selection)), get_image=bkt.Callback(lambda: self.get_item_image(index)), get_visible=bkt.Callback(lambda: self.button_get_visible(index)),", "textrange.ParagraphFormat.LineRuleWithin == -2: #if values differ, set the same value", "shape = slide.shapes.AddPicture(tmpfile, 0, -1, 200, 200) #FileName, LinkToFile, SaveWithDocument,", "return left, top, width, height def length_from_definition(self, length_definition, reference): if", "pplib.iterate_shape_textframes(shapes): try: value = self.get_attr_from_textrange(textframe.TextRange) except: # produces error for", "== -2147483648: #different values for each paragraph, so get value", "= 0, 0 ref_width,ref_height = page_setup.SlideWidth, page_setup.SlideHeight # target size", "number (values differ between selected items) with fallback value return", "[u\"1/3 Links\", [ 0, 0, 1./3, 1], 'CONTENT'], [u\"1/3 Mitte\",", "1], 'CONTENT'], [u\"1/3 Links\", [ 0, 0, 1./3, 1], 'CONTENT'],", "#e.g. 
no textframe detected return None elif int(value) == -2147483648:", "Unten\", [ 0, 5./6, 1, 1./6], 'CONTENT'] ] def __init__(self,", "return context.python_addin.load_image(self.items[index][0]) class PositionGallery(bkt.ribbon.Gallery): # items: [label, position, reference] #", "from __future__ import absolute_import from collections import deque import bkt", "\"SpaceBefore\" and textrange.ParagraphFormat.LineRuleBefore == -2: #if values differ, set the", "'-###', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) if self.attr in [\"SpaceBefore\", \"SpaceAfter\",", "'pt_to_cm', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) my_kwargs.update(kwargs) super(TextframeSpinnerBox, self).__init__(**my_kwargs) ### Spinner", "else: my_kwargs[\"round_cm\"] = True my_kwargs[\"convert\"] = \"pt_to_cm\" if self.attr in", "1], 'CONTENT'], [u\"2/3 Links\", [ 0, 0, 2./3, 1], 'CONTENT'],", "> 61695: #for higher numbers (f0ff works, f100 doesnt work)", "#font name is given, then insert as symbol font =", "name # shape.TextFrame.TextRange.Text = item[1] #symbol text if PPTSymbolsSettings.get_convert_into_shape(): #convert", "LeftIndent, FirstLineIndent, LineSpacing ''' #self.attr is automatically set through RibbonControl", "used before (it remains the font of the symbol). only", "= self.items[index] return 'Positionierung: ' + item[0] def get_item_supertip(self, index):", "= bkt.Callback(lambda index: self.items[index][1]), get_item_image = bkt.Callback(self.locpin_get_image, context=True), get_item_screentip =", "auf eine Standardposition.\", children=[ bkt.ribbon.Button( label=\"Benutzerdef. 
Bereich festlegen\", supertip=\"Der benutzerdefinierte", "in the specified item ''' # retrieve item-settings item =", "convert_into_bitmap = bkt.settings.get(\"bkt.symbols.convert_into_bitmap\", False) #always convert newly inserted symbols into", "CONTENTE / SLIDE / ABS # values are converted according", "my_kwargs[\"round_cm\"] = True my_kwargs[\"convert\"] = \"pt_to_cm\" if self.attr in [\"LeftIndent\",", "bkt.ribbon.SymbolsGallery.fallback_font def on_action_indexed(self, selected_item, index, context, selection, **kwargs): ''' create", "not work for higher level unicode, e.g. emojis, and throws", "0 else: self.huge_step = 0.5 self.big_step = 0.2 self.small_step =", "getattr(par_format, self.attr) ### Setter methods ### def set_attr_for_shapes(self, shapes, selection,", "self.length_from_definition(ldef, reference) return l elif type(length_definition) in [int, float, long]:", "(cls.convert_into_shape or bkt.get_key_state(bkt.KeyCodes.SHIFT)) and not bkt.get_key_state(bkt.KeyCodes.CTRL) @classmethod def switch_convert_into_bitmap(cls, pressed):", "image_mso='PositionAnchoringGallery', supertip=u\"Positioniere die ausgewählten Shapes auf eine Standardposition.\", children=[ bkt.ribbon.Button(", "element ''' item = self.items[index] position = item[1] reference =", "numbers (f0ff works, f100 doesnt work) InsertSymbol does not work", "ref_frame[3]) return left, top, width, height def length_from_definition(self, length_definition, reference):", "1, 1./6], 'CONTENT'], [u\"1/6 Unten\", [ 0, 5./6, 1, 1./6],", "self.items # ] ) if not \"image\" in kwargs and", "same value as in the first paragraph textrange.ParagraphFormat.LineRuleBefore = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore", "self.insert_symbol_into_text(textframe.TextRange, item) # for shape in shapes: # if shape.HasTextFrame", "text # if item[0]: # char_inserted.Font.Name = item[0] #font name", "cls.recent_symbols.append(item) bkt.settings[\"bkt.symbols.recent_symbols\"] = cls.recent_symbols 
@classmethod def switch_unicode_font(cls, font=None): cls.unicode_font =", "''' #self.attr is automatically set through RibbonControl attribute handling self.fallback_value", "get_enabled = bkt.get_enabled_auto ) ], **kwargs ) def on_action_indexed(self, selected_item,", "/ ABS ref_width,ref_height = presentation.PageSetup.SlideWidth, presentation.PageSetup.SlideHeight left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,0,ref_width,", "ref_frame=[0,v_offset,width, v_ref]) else: # SLIDE / ABS ref_width,ref_height = presentation.PageSetup.SlideWidth,", "or bkt.ribbon.SymbolsGallery.fallback_font def on_action_indexed(self, selected_item, index, context, selection, **kwargs): '''", "index: self.items[index][2]), # children = [ # Item(image=gal_item[0], screentip=gal_item[1], supertip=gal_item[2])", "if (self.attr == \"SpaceBefore\" and par_format.LineRuleBefore == 0) or (self.attr", "width = self.length_from_definition(pos_definition[2], ref_frame[2]) height = self.length_from_definition(pos_definition[3], ref_frame[3]) return left,", "if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: my_kwargs[\"round_pt\"] = True else:", "10 self.big_step = 3 self.small_step = 1 self.round_at = 0", "def fallback_font(self): return PPTSymbolsSettings.unicode_font or bkt.ribbon.SymbolsGallery.fallback_font def on_action_indexed(self, selected_item, index,", "context: return context.invoke_callback(self._callbacks['on_position_change'], target_frame=frame, **kwargs) def get_item_count(self, presentation): self.init_userdefined_area_item(presentation) return", "self.attr == \"SpaceWithin\" and textrange.ParagraphFormat.LineRuleWithin == -2: #if values differ,", "produces error for certain chart types, e.g. 
Treemap continue return", "textframe in pplib.iterate_shape_textframes(shapes): try: value = self.get_attr_from_textrange(textframe.TextRange) except: # produces", "bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_shape(cls): return (cls.convert_into_shape or bkt.get_key_state(bkt.KeyCodes.SHIFT))", "Getter Methods ### def get_attr_from_shapes(self, shapes, selection): ''' Get attr", "automatically set through RibbonControl attribute handling self.fallback_value = 0 my_kwargs", "try: return self.symbols[index] is not None except: return False def", "bitmap, define pen/brush height = 40 width = height*16./9 img", "PPTSymbolsSettings.get_convert_into_bitmap(): self.create_symbol_bitmap(selection.SlideRange(1), item) else: self.create_symbol_shape(selection.SlideRange(1), item) def _add_to_recent(self, item): PPTSymbolsSettings.add_to_recent(item)", "according to reference items = [ [u\"<NAME>\", [ 0, 0,", "[u\"1/2 Rechts\", [ .5, 0, .5, 1], 'CONTENT'], [u\"1/3 Links\",", "[u\"1/6 Unten\", [ 0, 5./6, 1, 1./6], 'CONTENT'] ] def", "bkt.get_key_state(bkt.KeyCodes.CTRL)) and not bkt.get_key_state(bkt.KeyCodes.SHIFT) class PPTSymbolsGallery(bkt.ribbon.SymbolsGallery): @property def fallback_font(self): return", "0 shape.TextFrame2.AutoSize = 1 #ppAutoSizeShapeToFitText shape.TextFrame2.MarginBottom = 0 shape.TextFrame2.MarginTop =", "shape.select() def create_symbol_bitmap(self, slide, item): import tempfile, os font =", "item-settings item = self.items[index] return 'Positionierung: ' + item[0] def", "Bereich wird anhand des gewählten Shapes festgelegt. Dieser Bereich ist", "- self.length_from_definition(-length_definition, reference) elif length_definition <= 1: # percentage values", "else: # shapes selected for textframe in pplib.iterate_shape_textframes(shapes): try: value", "angezeigten Position/Größe.' 
def create_image(self, position, reference, presentation): # create bitmap,", "as InsertAfter return placeholder_char.InsertSymbol(font, char_number, -1) #symbol: FontName, CharNumber (decimal),", "= False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def", "def get_enabled(self, shapes): # return True # def get_item_label(self, index):", "not work anymore. Also the default ppt symbol-picker only shows", "first paragraph textrange.ParagraphFormat.LineRuleWithin = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin setattr(textrange.ParagraphFormat, self.attr, value) class PPTSymbolsSettings(object):", "textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin setattr(textrange.ParagraphFormat, self.attr, value) class PPTSymbolsSettings(object): recent_symbols = deque(bkt.settings.get(\"bkt.symbols.recent_symbols\", []),", "= True, convert = 'pt_to_cm', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) my_kwargs.update(kwargs)", "__init__(self, **kwargs): ''' attr examples: SpaceBefore, SpaceAfter, LeftIndent, FirstLineIndent, LineSpacing", "way to replace these values and correctly show icon is", "self.create_image(item[1], item[2], presentation) def get_item_screentip(self, index): # retrieve item-settings item", "set the same value as in the first paragraph textrange.ParagraphFormat.LineRuleAfter", "= False cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] =", "self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) def create_symbol_shape(self, slide, item): shape = slide.shapes.addTextbox( #office.MsoAutoShapeType.msoShapeRectangle.value__,", "bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def switch_convert_into_shape(cls, pressed): cls.convert_into_shape = pressed", "### Setter 
methods ### def set_attr_for_shapes(self, shapes, selection, value): '''", "== 1: pplib.ContentArea.define_contentarea(presentation, shapes[0]) else: frame = pplib.BoundingFrame.from_shapes(shapes) pplib.ContentArea.define_contentarea(presentation, frame)", "creates an item image with target area according to settings", "**kwargs): ''' attr examples: SpaceBefore, SpaceAfter, LeftIndent, FirstLineIndent, LineSpacing '''", "= bkt.Callback(self.locpin_get_image, context=True) my_kwargs.update(kwargs) super(LocpinGallery, self).__init__(**my_kwargs) def locpin_on_action_indexed(self, selected_item, index):", "positions=None, label=\"Standardpositionen\", columns=3, **kwargs): self.items = positions or PositionGallery.items super(PositionGallery,", "size if reference == 'CONTENT': v_offset = height/5 v_ref =", "unicode, e.g. emojis, and throws TypeError if char_number > 61695:", "0.5 self.big_step = 0.2 self.small_step = 0.1 self.round_at = 1", "Fixierung bei Änderung {}\", **kwargs): self.locpin = locpin or pplib.GlobalLocPin", "''' value = max(0,value) for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textframe(textframe, value)", "(\"fix_locpin_bm\", \"Unten-mitte\", item_supertip.format(\"unten-mitte\")), (\"fix_locpin_br\", \"Unten-rechts\", item_supertip.format(\"unten-rechts\")), ] my_kwargs = dict(", "switch_convert_into_bitmap(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap = pressed bkt.settings[\"bkt.symbols.convert_into_shape\"] =", "item) else: self.create_symbol_shape(selection.SlideRange(1), item) def _add_to_recent(self, item): PPTSymbolsSettings.add_to_recent(item) def insert_symbol_into_text(self,", "40 width = height*16./9 img = Drawing.Bitmap(width, height) g =", "in shapes: # if shape.HasTextFrame == -1: # self.insert_symbol_into_text(shape.TextFrame2.TextRange, item)", "value return self.fallback_value else: return value def on_change(self, shapes, selection,", "self).get_item_image(index) except: 
return super(PPTSymbolsGalleryRecent, self).create_symbol_image(\"Arial\", \"?\") def button_get_label(self, index): try:", "a chart return self._get_attr(selection.TextRange2.ParagraphFormat) except: return None else: # shapes", "f0ff. raise TypeError(\"character number to large for InsertSymbol\") #fallback to", "else SymbolsGallery.fallback_font bkt.settings[\"bkt.symbols.unicode_font\"] = cls.unicode_font @classmethod def convert_into_text(cls): return not", "emojis, and throws TypeError if char_number > 61695: #for higher", "#convert into shape or bitmap if PPTSymbolsSettings.get_convert_into_bitmap(): self.create_symbol_bitmap(selection.SlideRange(1), item) else:", "or bkt.get_key_state(bkt.KeyCodes.CTRL)) and not bkt.get_key_state(bkt.KeyCodes.SHIFT) class PPTSymbolsGallery(bkt.ribbon.SymbolsGallery): @property def fallback_font(self):", "# if shape.HasTextFrame == -1: # self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) def create_symbol_shape(self,", "textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textrange(textframe.TextRange, value) def set_attr_for_textrange(self, textrange, value): #using", "behaves the same as InsertAfter return placeholder_char.InsertSymbol(font, char_number, -1) #symbol:", "if selection.Type == 3: # text selected try: # produces", "def __init__(self, **kwargs): ''' attr examples: MarginTop, MarginBottom, MarginLeft, MarginRight", "context=True) my_kwargs.update(kwargs) super(LocpinGallery, self).__init__(**my_kwargs) def locpin_on_action_indexed(self, selected_item, index): self.locpin.index =", "else: new_shape.select() else: shape.select() def create_symbol_bitmap(self, slide, item): import tempfile,", "Undefined\" def button_get_visible(self, index): try: return self.symbols[index] is not None", "'from right' return reference - self.length_from_definition(-length_definition, reference) elif length_definition <=", "selection, value) ### Getter Methods ### def get_attr_from_shapes(self, shapes, 
selection):", "font = item[0] or self.fallback_font try: char_number = ord(item[1]) #ord", "selected items) with fallback value return self.fallback_value else: return value", "self.attr in [\"LeftIndent\", \"FirstLineIndent\"]: my_kwargs[\"big_step\"] = 0.25 my_kwargs[\"small_step\"] = 0.125", "textframe, value): setattr(textframe, self.attr, value) class ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization", "== \"SpaceWithin\" and textrange.ParagraphFormat.LineRuleWithin == -2: #if values differ, set", "values can be absolute or percentage # reference: CONTENTE /", "0 shape.TextFrame2.MarginTop = 0 shape.TextFrame2.MarginLeft = 0 shape.TextFrame2.MarginRight = 0", "\"SpaceWithin\" and par_format.LineRuleWithin == 0): self.huge_step = 10 self.big_step =", "index, context, **kwargs): ''' reposition shapes according of settings in", "only way to replace these values and correctly show icon", "textrange.InsertAfter(item[1]) #append symbol text #so, NameFarEast and NameComplexScript should be", "children = [ # Item(image=gal_item[0], screentip=gal_item[1], supertip=gal_item[2]) # for gal_item", "supertip=u\"Positioniere die ausgewählten Shapes auf eine Standardposition.\", children=[ bkt.ribbon.Button( label=\"Benutzerdef.", "#different values for each paragraph, so get value from first", "item[0] #font name def insert_symbol_into_shapes(self, shapes, item): #pplib.iterate_shape_textframes(shapes, lambda textframe:", "be absolute or percentage # reference: CONTENTE / SLIDE /", "presentation): # create bitmap, define pen/brush height = 40 width", "<filename>bkt/library/powerpoint/elements.py # -*- coding: utf-8 -*- ''' Created on 02.11.2017", "item_supertip.format(\"oben-mitte\")), (\"fix_locpin_tr\", \"Oben-rechts\", item_supertip.format(\"oben-rechts\")), (\"fix_locpin_ml\", \"Mitte-links\", item_supertip.format(\"mitte-links\")), (\"fix_locpin_mm\", \"Mitte-mitte\", item_supertip.format(\"mitte-mitte\")),", "-*- coding: utf-8 -*- 
''' Created on 02.11.2017 @author: fstallmann", "par_format.LineRuleBefore == 0) or (self.attr == \"SpaceAfter\" and par_format.LineRuleAfter ==", "that InsertSymbol behaves the same as InsertAfter return placeholder_char.InsertSymbol(font, char_number,", "None elif int(value) == -2147483648: #replace large negative number (values", "NameComplexScript should be writable, but they are not if InsertSymbol", "bitmap picture unicode_font = bkt.settings.get(\"bkt.symbols.unicode_font\", None) #insert unicode characters as", "self.length_from_definition(-length_definition, reference) elif length_definition <= 1: # percentage values return", "reference items = [ [u\"<NAME>\", [ 0, 0, 1, 1],", "exists at all if pplib.ContentArea.isset_contentarea(presentation): left, top, width, height =", "**kwargs): ''' create numberd shape according of settings in clicked", "correctly show icon is setting it to '+mn-..' char_inserted.Font.NameFarEast =", "#pplib.iterate_shape_textframes(shapes, lambda textframe: self.insert_symbol_into_text(textframe.TextRange, item)) for textframe in pplib.iterate_shape_textframes(shapes): self.insert_symbol_into_text(textframe.TextRange,", "1 #ppAutoSizeShapeToFitText shape.TextFrame2.MarginBottom = 0 shape.TextFrame2.MarginTop = 0 shape.TextFrame2.MarginLeft =", "''' # retrieve item-settings item = self.items[index] return self.create_image(item[1], item[2],", "try: # produces error if there is no textrange, e.g.", "if self.attr in [\"LeftIndent\", \"FirstLineIndent\"]: my_kwargs[\"big_step\"] = 0.25 my_kwargs[\"small_step\"] =", "set the same value as in the first paragraph textrange.ParagraphFormat.LineRuleBefore", "not contain LeftIndent, etc. 
else: for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textrange(textframe.TextRange,", "item[1], 400, None) tmpfile = os.path.join(tempfile.gettempdir(), \"bkt-symbol.png\") img.Save(tmpfile, Drawing.Imaging.ImageFormat.Png) shape", "self.button_get_label(index)), on_action=bkt.Callback(lambda context, selection: self.on_action_indexed(None, index, context, selection)), get_image=bkt.Callback(lambda: self.get_item_image(index)),", "0.2 self.small_step = 0.1 self.round_at = 1 return getattr(par_format, self.attr)", "def get_item_supertip(self, index): return 'Verwende angezeigten Position/Größe.' def create_image(self, position,", "Rechts\", [2./3, 0, 1./3, 1], 'CONTENT'], [u\"1/6 Oben\", [ 0,", "über die Gallery wählbar und wird dauerhaft in der aktuellen", "== 12: self.items.pop() self.items.append([u\"Benutzerdef. Bereich\", [left, top, width, height], 'ABS'])", "set_attr_for_shapes(self, shapes, selection, value): if self.attr != \"FirstLineIndent\": #FirstLineIndent can", "return (cls.convert_into_shape or bkt.get_key_state(bkt.KeyCodes.SHIFT)) and not bkt.get_key_state(bkt.KeyCodes.CTRL) @classmethod def switch_convert_into_bitmap(cls,", "paragraph textrange.ParagraphFormat.LineRuleWithin = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin setattr(textrange.ParagraphFormat, self.attr, value) class PPTSymbolsSettings(object): recent_symbols", "attr examples: MarginTop, MarginBottom, MarginLeft, MarginRight ''' #self.attr is automatically", "distance 'from right' return reference - self.length_from_definition(-length_definition, reference) elif length_definition", "return self.symbols[index] is not None except: return False def get_index_as_button(self,", "value): #using textrange instead of textframe! 
if self.attr == \"SpaceBefore\"", "1, 100,100,200,200) shape.TextFrame2.WordWrap = 0 shape.TextFrame2.AutoSize = 1 #ppAutoSizeShapeToFitText shape.TextFrame2.MarginBottom", "\"SpaceWithin\" and textrange.ParagraphFormat.LineRuleWithin == -2: #if values differ, set the", "''' for textframe in pplib.iterate_shape_textframes(shapes): try: return self.get_attr_from_textframe(textframe) except: #", "'+mn-..' char_inserted.Font.NameFarEast = \"+mn-ea\" char_inserted.Font.NameComplexScript = \"+mn-cs\" char_inserted.Font.Name = font", ") my_kwargs.update(kwargs) super(TextframeSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks ### def", "def set_attr_for_shapes(self, shapes, selection, value): ''' Set attr for shapes", "Oben\", [ 0, 0, 1, 1./6], 'CONTENT'], [u\"1/6 Unten\", [", "no text is selected return self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat) except: try: # produces", "convert newly inserted symbols into shapes convert_into_bitmap = bkt.settings.get(\"bkt.symbols.convert_into_bitmap\", False)", "= pplib.BoundingFrame.from_rect(left, top, width, height) if 'on_position_change' in self._callbacks: if", "if selection.Type == 3: # text selected self.set_attr_for_textrange(selection.TextRange2, value) #need", "specify distance 'from right' return reference - self.length_from_definition(-length_definition, reference) elif", "bkt.apps.ppt_selection_contains_textframe, ) if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: my_kwargs[\"round_pt\"] =", "= self.items[index] return self.create_image(item[1], item[2], presentation) def get_item_screentip(self, index): #", "if item[0] or PPTSymbolsSettings.unicode_font is not None: #font name is", "left, top, width, height = pplib.ContentArea.read_contentarea(presentation) if len(self.items) == 12:", "super(PPTSymbolsGalleryRecent, self).get_item_image(index) except: return super(PPTSymbolsGalleryRecent, self).create_symbol_image(\"Arial\", \"?\") def button_get_label(self, 
index):", "Methods ### def get_attr_from_shapes(self, shapes, selection): ''' Get attr for", "is used before (it remains the font of the symbol).", "= 0 my_kwargs = dict( size_string = '-###', get_enabled =", "to remove if already exists and add to beginning cls.recent_symbols.remove(item)", "elif int(value) == -2147483648: #replace large negative number (values differ", "dict( size_string = '-###', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) if self.attr", "should be writable, but they are not if InsertSymbol is", "# produces error for certain chart types, e.g. Treemap continue", "Dieser Bereich ist anschließend über die Gallery wählbar und wird", "id=\"{}_button_{}\".format(self.id, index), get_label=bkt.Callback(lambda: self.button_get_label(index)), on_action=bkt.Callback(lambda context, selection: self.on_action_indexed(None, index, context,", "# target size left,top,width,height = self.rect_from_definition(position, ref_frame=[ref_left,ref_top,ref_width, ref_height]) frame =", "(f0ff works, f100 doesnt work) InsertSymbol does not work anymore.", "[2./3, 0, 1./3, 1], 'CONTENT'], [u\"1/6 Oben\", [ 0, 0,", "return self.symbols[index][2] except: return \"Zuletzt verwendet: Undefined\" def button_get_visible(self, index):", "return None def get_attr_from_textframe(self, textframe): return getattr(textframe, self.attr) ### Setter", "or PositionGallery.items super(PositionGallery, self).__init__( label = label, columns = columns,", "value) def set_attr_for_textframe(self, textframe, value): setattr(textframe, self.attr, value) class ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox):", "given, then insert as symbol font = item[0] or self.fallback_font", "be writable, but they are not if InsertSymbol is used", "== -2: #if values differ, set the same value as", "* length_definition else: # absolute values return length_definition else: return", "on_action_indexed(self, selected_item, index, context, selection, **kwargs): ''' create numberd shape", 
"= 'MarginTop' def __init__(self, **kwargs): ''' attr examples: MarginTop, MarginBottom,", "orig_fontsize = shape.TextFrame2.TextRange.Font.Size shape.TextFrame2.TextRange.Font.Size = 60 shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible = 0 new_shape", "0 my_kwargs = dict( size_string = '-###', get_enabled = bkt.apps.ppt_selection_contains_textframe,", "# SLIDE / ABS page_setup = context.presentation.PageSetup ref_left,ref_top = 0,", "bkt.Callback(lambda index: self.items[index][2]), # children = [ # Item(image=gal_item[0], screentip=gal_item[1],", "#symbol: FontName, CharNumber (decimal), Unicode=True except TypeError: char_inserted = textrange.InsertAfter(item[1])", "get_image=bkt.Callback(lambda: self.get_item_image(index)), get_visible=bkt.Callback(lambda: self.button_get_visible(index)), ) class LocpinGallery(bkt.ribbon.Gallery): def __init__(self, locpin=None,", "items) with fallback value return self.fallback_value else: return value def", "-1, 200, 200) #FileName, LinkToFile, SaveWithDocument, Left, Top shape.select() os.remove(tmpfile)", "rect_from_definition(self, pos_definition, ref_frame=[0,0,640,480]): left = self.length_from_definition(pos_definition[0], ref_frame[2]) + ref_frame[0] top", "to replace these values and correctly show icon is setting", "for textframe in pplib.iterate_shape_textframes(shapes): try: return self.get_attr_from_textframe(textframe) except: # produces", "self.items = [ (\"fix_locpin_tl\", \"Oben-links\", item_supertip.format(\"oben-links\")), (\"fix_locpin_tm\", \"Oben-mitte\", item_supertip.format(\"oben-mitte\")), (\"fix_locpin_tr\",", "True) #always convert newly inserted symbols into shapes convert_into_bitmap =", "in pplib.iterate_shape_textframes(shapes): self.insert_symbol_into_text(textframe.TextRange, item) # for shape in shapes: #", "height def length_from_definition(self, length_definition, reference): if type(length_definition) == list: #", "ref_width,ref_height = presentation.PageSetup.SlideWidth, 
presentation.PageSetup.SlideHeight left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,0,ref_width, ref_height]) left", "bei Änderung {}\", **kwargs): self.locpin = locpin or pplib.GlobalLocPin self.items", "#fallback to InsertAfter placeholder_char = textrange.InsertAfter(\"X\") #append placeholder symbol so", "items = [ [u\"<NAME>\", [ 0, 0, 1, 1], 'CONTENT'],", "item[1] reference = item[2] #self.change_position(selection, shapes, item[1]) # reference size", "context, **kwargs): ''' reposition shapes according of settings in clicked", "len(self.items) # def get_enabled(self, shapes): # return True # def", "symbol-picker only shows unicode chars til f0ff. raise TypeError(\"character number", "return self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat) except: try: # produces error if there is", "super(TextframeSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks ### def get_text(self, shapes,", "self.length_from_definition(pos_definition[2], ref_frame[2]) height = self.length_from_definition(pos_definition[3], ref_frame[3]) return left, top, width,", "return None else: # shapes selected for textframe in pplib.iterate_shape_textframes(shapes):", "selection, **kwargs): ''' create numberd shape according of settings in", "return None def get_attr_from_textrange(self, textrange): return self._get_attr(textrange.ParagraphFormat) def _get_attr(self, par_format):", "(values differ between selected items) with fallback value return self.fallback_value", "Drawing.Rectangle(round(left),round(top), round(fill_width), round(fill_height))) color = Drawing.ColorTranslator.FromHtml('#ff999999') pen = Drawing.Pen(color,1) g.DrawRectangle(pen,", "picture unicode_font = bkt.settings.get(\"bkt.symbols.unicode_font\", None) #insert unicode characters as symbol", "for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textrange(textframe.TextRange, value) def set_attr_for_textrange(self, textrange, 
value):", "the same value as in the first paragraph textrange.ParagraphFormat.LineRuleBefore =", "Methods ### def get_attr_from_shapes(self, shapes, selection): if selection.Type == 3:", "return self.get_attr_from_textframe(textframe) except: # produces error for certain chart types,", "pplib.convert_text_into_shape(shape) new_shape.TextFrame2.TextRange.Font.Size = orig_fontsize except: shape.select() else: new_shape.select() else: shape.select()", "first if tag exists at all if pplib.ContentArea.isset_contentarea(presentation): left, top,", "if pplib.ContentArea.isset_contentarea(presentation): left, top, width, height = pplib.ContentArea.read_contentarea(presentation) if len(self.items)", "shape or bitmap if PPTSymbolsSettings.get_convert_into_bitmap(): self.create_symbol_bitmap(selection.SlideRange(1), item) else: self.create_symbol_shape(selection.SlideRange(1), item)", "bkt.Callback(lambda index: self.items[index][1]), get_item_supertip = bkt.Callback(lambda index: self.items[index][2]), # children", "selection): ''' Get attr for shapes ''' for textframe in", "item[0]: # char_inserted.Font.Name = item[0] #font name def insert_symbol_into_shapes(self, shapes,", "but they are not if InsertSymbol is used before (it", "= 0 shape.TextFrame2.MarginRight = 0 self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) # if item[0]:", "lambda textframe: self.insert_symbol_into_text(textframe.TextRange, item)) for textframe in pplib.iterate_shape_textframes(shapes): self.insert_symbol_into_text(textframe.TextRange, item)", "import bkt from bkt import dotnet Drawing = dotnet.import_drawing() from", "= textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter if self.attr == \"SpaceWithin\" and textrange.ParagraphFormat.LineRuleWithin == -2:", "[150, 50%] l = 0 for ldef in length_definition: l", "def length_from_definition(self, length_definition, reference): if type(length_definition) == list: # allow", "if shape.HasTextFrame == -1: # 
self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) def create_symbol_shape(self, slide,", "item_supertip.format(\"unten-rechts\")), ] my_kwargs = dict( # get_enabled=bkt.apps.ppt_shapes_or_text_selected, columns=\"3\", item_height=\"24\", item_width=\"24\",", "reference == 'CONTENT': v_offset = height/5 v_ref = (height*4)/5 left,top,fill_width,fill_height", "/ref_width * width fill_width = fill_width /ref_width * width top", "columns, image_mso='PositionAnchoringGallery', supertip=u\"Positioniere die ausgewählten Shapes auf eine Standardposition.\", children=[", "Also the default ppt symbol-picker only shows unicode chars til", "= self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat) except: pass return value return None def get_attr_from_textrange(self,", "get_enabled = bkt.apps.ppt_selection_contains_textframe, ) if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]:", "anschließend über die Gallery wählbar und wird dauerhaft in der", "### Spinner Box callbacks ### def get_text(self, shapes, selection): value", "ref_frame[2]) + ref_frame[0] top = self.length_from_definition(pos_definition[1], ref_frame[3]) + ref_frame[1] width", "\"%s\" % getattr(NumberedShapes, 'label_' + item['label'])[index%self.columns] def get_item_image(self, index, presentation):", "color = Drawing.ColorTranslator.FromHtml('#ffdd0000') brush = Drawing.SolidBrush(color) g.FillRectangle(brush, Drawing.Rectangle(round(left),round(top), round(fill_width), round(fill_height)))", "not \"image\" in kwargs and not \"image_mso\" in kwargs: my_kwargs[\"get_image\"]", "item[0] or self.fallback_font try: char_number = ord(item[1]) #ord does not", "as in the first paragraph textrange.ParagraphFormat.LineRuleAfter = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter if self.attr", "PositionGallery(bkt.ribbon.Gallery): # items: [label, position, reference] # position: [left, top,", "= item[0] #font name # shape.TextFrame.TextRange.Text = item[1] #symbol text", 
"position = item[1] reference = item[2] #self.change_position(selection, shapes, item[1]) #", "+ ref_frame[0] top = self.length_from_definition(pos_definition[1], ref_frame[3]) + ref_frame[1] width =", "bkt.settings[\"bkt.symbols.unicode_font\"] = cls.unicode_font @classmethod def convert_into_text(cls): return not (cls.convert_into_shape or", "label = label, columns = columns, image_mso='PositionAnchoringGallery', supertip=u\"Positioniere die ausgewählten", "Drawing.ColorTranslator.FromHtml('#ffdd0000') brush = Drawing.SolidBrush(color) g.FillRectangle(brush, Drawing.Rectangle(round(left),round(top), round(fill_width), round(fill_height))) color =", "reference) return l elif type(length_definition) in [int, float, long]: if", "pplib.ContentArea.define_contentarea(presentation, shapes[0]) else: frame = pplib.BoundingFrame.from_shapes(shapes) pplib.ContentArea.define_contentarea(presentation, frame) self.init_userdefined_area_item(presentation) def", "@classmethod def switch_convert_into_text(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap = False", "/ref_height * height fill_height = fill_height/ref_height * height color =", "# def get_enabled(self, shapes): # return True # def get_item_label(self,", "left = self.length_from_definition(pos_definition[0], ref_frame[2]) + ref_frame[0] top = self.length_from_definition(pos_definition[1], ref_frame[3])", "context=True), get_item_screentip = bkt.Callback(lambda index: self.items[index][1]), get_item_supertip = bkt.Callback(lambda index:", "gewählten Shapes festgelegt. 
Dieser Bereich ist anschließend über die Gallery", "selection, value): if self.attr != \"FirstLineIndent\": #FirstLineIndent can be negative!", "3: # text selected try: # produces error if no", "= cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def switch_convert_into_shape(cls, pressed): cls.convert_into_shape", "get_index_as_button(self, index): return bkt.ribbon.Button( id=\"{}_button_{}\".format(self.id, index), get_label=bkt.Callback(lambda: self.button_get_label(index)), on_action=bkt.Callback(lambda context,", "textframe detected return None elif int(value) == -2147483648: #replace large", "else: for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textrange(textframe.TextRange, value) def set_attr_for_textrange(self, textrange,", "the first paragraph textrange.ParagraphFormat.LineRuleAfter = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter if self.attr == \"SpaceWithin\"", "top = self.length_from_definition(pos_definition[1], ref_frame[3]) + ref_frame[1] width = self.length_from_definition(pos_definition[2], ref_frame[2])", "= \"+mn-ea\" char_inserted.Font.NameComplexScript = \"+mn-cs\" char_inserted.Font.Name = font #font name", "shapes, item): #pplib.iterate_shape_textframes(shapes, lambda textframe: self.insert_symbol_into_text(textframe.TextRange, item)) for textframe in", "on 02.11.2017 @author: fstallmann ''' from __future__ import absolute_import from", "= 40 width = height*16./9 img = Drawing.Bitmap(width, height) g", "as in the first paragraph textrange.ParagraphFormat.LineRuleBefore = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore if self.attr", "InsertSymbol does not work anymore. 
Also the default ppt symbol-picker", "= bkt.settings.get(\"bkt.symbols.unicode_font\", None) #insert unicode characters as symbol with special", "\"FirstLineIndent\"]: my_kwargs[\"big_step\"] = 0.25 my_kwargs[\"small_step\"] = 0.125 my_kwargs[\"rounding_factor\"] = 0.125", "img = Drawing.Bitmap(width, height) g = Drawing.Graphics.FromImage(img) # reference size", "selection): if selection.Type == 3: # text selected try: #", "else: shape.select() def create_symbol_bitmap(self, slide, item): import tempfile, os font", "except: return \"Zuletzt verwendet: Undefined\" def button_get_visible(self, index): try: return", "0, 5./6, 1, 1./6], 'CONTENT'] ] def __init__(self, positions=None, label=\"Standardpositionen\",", "1, 1./6], 'CONTENT'] ] def __init__(self, positions=None, label=\"Standardpositionen\", columns=3, **kwargs):", "02.11.2017 @author: fstallmann ''' from __future__ import absolute_import from collections", "# shape.TextFrame.TextRange.Font.Name = item[0] #font name # shape.TextFrame.TextRange.Text = item[1]", "return len(self.items) # def get_enabled(self, shapes): # return True #", "shapes, selection): ''' Get attr for shapes ''' for textframe", "round(fill_width), round(fill_height))) color = Drawing.ColorTranslator.FromHtml('#ff999999') pen = Drawing.Pen(color,1) g.DrawRectangle(pen, Drawing.Rectangle(0,0,", "return img def rect_from_definition(self, pos_definition, ref_frame=[0,0,640,480]): left = self.length_from_definition(pos_definition[0], ref_frame[2])", "return context.invoke_callback(self._callbacks['on_position_change'], target_frame=frame, **kwargs) def get_item_count(self, presentation): self.init_userdefined_area_item(presentation) return len(self.items)", "selection) if value is None: #e.g. 
no textframe detected return", "elif PPTSymbolsSettings.convert_into_text() and selection.Type == 2 and not shift_or_ctrl: #shapes", "in the first paragraph textrange.ParagraphFormat.LineRuleWithin = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleWithin setattr(textrange.ParagraphFormat, self.attr, value)", "of textframe! if self.attr == \"SpaceBefore\" and textrange.ParagraphFormat.LineRuleBefore == -2:", "my_kwargs = dict( # get_enabled=bkt.apps.ppt_shapes_or_text_selected, columns=\"3\", item_height=\"24\", item_width=\"24\", show_item_label=False, on_action_indexed", "convert_into_text(cls): return not (cls.convert_into_shape or cls.convert_into_bitmap) @classmethod def switch_convert_into_text(cls, pressed):", "set_userdefined_area(self, presentation, shapes): if len(shapes) == 1: pplib.ContentArea.define_contentarea(presentation, shapes[0]) else:", "shape.TextFrame2.TextRange.Font.Size = 60 shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible = 0 new_shape = pplib.convert_text_into_shape(shape) new_shape.TextFrame2.TextRange.Font.Size", "shape.TextFrame2.MarginLeft = 0 shape.TextFrame2.MarginRight = 0 self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) # if", "cls.convert_into_bitmap @classmethod def get_convert_into_shape(cls): return (cls.convert_into_shape or bkt.get_key_state(bkt.KeyCodes.SHIFT)) and not", "else: self.create_symbol_shape(selection.SlideRange(1), item) def _add_to_recent(self, item): PPTSymbolsSettings.add_to_recent(item) def insert_symbol_into_text(self, textrange,", "attr for shapes ''' value = max(0,value) for textframe in", "top, width, height) if 'on_position_change' in self._callbacks: if context: return", "= pplib.BoundingFrame.from_shapes(shapes) pplib.ContentArea.define_contentarea(presentation, frame) self.init_userdefined_area_item(presentation) def init_userdefined_area_item(self, presentation): #due to", "ref_left,ref_top,ref_width,ref_height = pplib.slide_content_size(context.slide) else: # SLIDE / ABS 
page_setup =", "exists and add to beginning cls.recent_symbols.remove(item) cls.recent_symbols.append(item) except ValueError: cls.recent_symbols.append(item)", "[u\"1/3 Rechts\", [2./3, 0, 1./3, 1], 'CONTENT'], [u\"1/6 Oben\", [", "type(length_definition) == list: # allow [150, 50%] l = 0", "return getattr(textframe, self.attr) ### Setter methods ### def set_attr_for_shapes(self, shapes,", "cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_bitmap(cls): return (cls.convert_into_bitmap or", "[ 0, 0, 1, 1], 'CONTENT'], [u\"2/3 Links\", [ 0,", "item_height=\"24\", item_width=\"24\", show_item_label=False, on_action_indexed = bkt.Callback(self.locpin_on_action_indexed), get_selected_item_index = bkt.Callback(lambda: self.locpin.index),", "paragraph textrange.ParagraphFormat.LineRuleBefore = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore if self.attr == \"SpaceAfter\" and textrange.ParagraphFormat.LineRuleAfter", "else: return textrange.InsertAfter(item[1]) #append symbol text # if item[0]: #", "image with target area according to settings in the specified", "self.set_attr_for_shapes(shapes, selection, value) ### Getter Methods ### def get_attr_from_shapes(self, shapes,", "def rect_from_definition(self, pos_definition, ref_frame=[0,0,640,480]): left = self.length_from_definition(pos_definition[0], ref_frame[2]) + ref_frame[0]", "font #if font else SymbolsGallery.fallback_font bkt.settings[\"bkt.symbols.unicode_font\"] = cls.unicode_font @classmethod def", "return placeholder_char.InsertSymbol(font, char_number, -1) #symbol: FontName, CharNumber (decimal), Unicode=True except", "2./3, 1], 'CONTENT'], [u\"1/2 Links\", [ 0, 0, .5, 1],", "in [\"LeftIndent\", \"FirstLineIndent\"]: my_kwargs[\"big_step\"] = 0.25 my_kwargs[\"small_step\"] = 0.125 my_kwargs[\"rounding_factor\"]", "height = pplib.ContentArea.read_contentarea(presentation) if len(self.items) == 12: self.items.pop() 
self.items.append([u\"Benutzerdef. Bereich\",", "label=\"Standardpositionen\", columns=3, **kwargs): self.items = positions or PositionGallery.items super(PositionGallery, self).__init__(", "certain chart types, e.g. Treemap continue return None def get_attr_from_textframe(self,", "dict( size_string = '###', round_cm = True, convert = 'pt_to_cm',", "def set_attr_for_textframe(self, textframe, value): setattr(textframe, self.attr, value) class ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ###", "create_symbol_bitmap(self, slide, item): import tempfile, os font = item[0] or", "0, 0, 1./3, 1], 'CONTENT'], [u\"1/3 Mitte\", [1./3, 0, 1./3,", "for textframe in pplib.iterate_shape_textframes(shapes): try: value = self.get_attr_from_textrange(textframe.TextRange) except: #", "self.insert_symbol_into_text(textframe.TextRange, item)) for textframe in pplib.iterate_shape_textframes(shapes): self.insert_symbol_into_text(textframe.TextRange, item) # for", "is None: return context.python_addin.load_image(self.items[self.locpin.index][0]) else: return context.python_addin.load_image(self.items[index][0]) class PositionGallery(bkt.ribbon.Gallery): #", "my_kwargs.update(kwargs) super(ParagraphFormatSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks ### def get_text(self,", "item[0] #font name # shape.TextFrame.TextRange.Text = item[1] #symbol text if", "CharNumber (decimal), Unicode=True except TypeError: char_inserted = textrange.InsertAfter(item[1]) #append symbol", "self.set_attr_for_textframe(textframe, value) def set_attr_for_textframe(self, textframe, value): setattr(textframe, self.attr, value) class", "show icon is setting it to '+mn-..' 
char_inserted.Font.NameFarEast = \"+mn-ea\"", "except: pass return value return None def get_attr_from_textrange(self, textrange): return", "if 'on_position_change' in self._callbacks: if context: return context.invoke_callback(self._callbacks['on_position_change'], target_frame=frame, **kwargs)", "presentation): #due to performance check first if tag exists at", "pplib.BoundingFrame.from_shapes(shapes) pplib.ContentArea.define_contentarea(presentation, frame) self.init_userdefined_area_item(presentation) def init_userdefined_area_item(self, presentation): #due to performance", "0 shape.TextFrame2.MarginLeft = 0 shape.TextFrame2.MarginRight = 0 self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) #", "switch_convert_into_text(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] =", "class PPTSymbolsSettings(object): recent_symbols = deque(bkt.settings.get(\"bkt.symbols.recent_symbols\", []), maxlen=3) convert_into_shape = bkt.settings.get(\"bkt.symbols.convert_into_shape\",", "= item[0] #font name def insert_symbol_into_shapes(self, shapes, item): #pplib.iterate_shape_textframes(shapes, lambda", "#insert unicode characters as symbol with special font (e.g. 
Arial", "\"pt_to_cm\" if self.attr in [\"LeftIndent\", \"FirstLineIndent\"]: my_kwargs[\"big_step\"] = 0.25 my_kwargs[\"small_step\"]", "height fill_height = fill_height/ref_height * height color = Drawing.ColorTranslator.FromHtml('#ffdd0000') brush", "MarginLeft, MarginRight ''' #self.attr is automatically set through RibbonControl attribute", "try: if int(value) == -2147483648: #different values for each paragraph,", "\"image\" in kwargs and not \"image_mso\" in kwargs: my_kwargs[\"get_image\"] =", "= 0 else: self.huge_step = 0.5 self.big_step = 0.2 self.small_step", "\"+mn-ea\" char_inserted.Font.NameComplexScript = \"+mn-cs\" char_inserted.Font.Name = font #font name return", "absolute or percentage # reference: CONTENTE / SLIDE / ABS", "self.locpin.index = index def locpin_get_image(self, context, index=None): if index is", "None def get_attr_from_textrange(self, textrange): return self._get_attr(textrange.ParagraphFormat) def _get_attr(self, par_format): if", "InsertSymbol behaves the same as InsertAfter return placeholder_char.InsertSymbol(font, char_number, -1)", "die ausgewählten Shapes auf eine Standardposition.\", children=[ bkt.ribbon.Button( label=\"Benutzerdef. Bereich", "value is None: #e.g. no textframe detected return None elif", "= 0.5 self.big_step = 0.2 self.small_step = 0.1 self.round_at =", "'CONTENT'], [u\"1/3 Mitte\", [1./3, 0, 1./3, 1], 'CONTENT'], [u\"1/3 Rechts\",", "dotnet.import_drawing() from . 
import helpers as pplib class TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ###", "\"Oben-rechts\", item_supertip.format(\"oben-rechts\")), (\"fix_locpin_ml\", \"Mitte-links\", item_supertip.format(\"mitte-links\")), (\"fix_locpin_mm\", \"Mitte-mitte\", item_supertip.format(\"mitte-mitte\")), (\"fix_locpin_mr\", \"Mitte-rechts\",", "number to large for InsertSymbol\") #fallback to InsertAfter placeholder_char =", "Links\", [ 0, 0, 1./3, 1], 'CONTENT'], [u\"1/3 Mitte\", [1./3,", "long]: if length_definition < 0: # negative values specify distance", "1./6], 'CONTENT'], [u\"1/6 Unten\", [ 0, 5./6, 1, 1./6], 'CONTENT']", "self.attr, value) class ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr = 'SpaceBefore'", "higher numbers (f0ff works, f100 doesnt work) InsertSymbol does not", "self.items[index] position = item[1] reference = item[2] #self.change_position(selection, shapes, item[1])", "from first paragraph value = self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat) except: pass return value", "1], 'CONTENT'], [u\"1/2 Rechts\", [ .5, 0, .5, 1], 'CONTENT'],", "value = max(0,value) for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textframe(textframe, value) def", "items: [label, position, reference] # position: [left, top, width, height]", "in the first paragraph textrange.ParagraphFormat.LineRuleBefore = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore if self.attr ==", "at all if pplib.ContentArea.isset_contentarea(presentation): left, top, width, height = pplib.ContentArea.read_contentarea(presentation)", "button_get_visible(self, index): try: return self.symbols[index] is not None except: return", "= bkt.get_enabled_auto ) ], **kwargs ) def on_action_indexed(self, selected_item, index,", "# reference: CONTENTE / SLIDE / ABS # values are", "== list: # allow [150, 50%] l = 0 for", "attr = 'SpaceBefore' def __init__(self, **kwargs): ''' attr examples: SpaceBefore,", "# 
for shape in shapes: # if shape.HasTextFrame == -1:", "for certain chart types, e.g. Treemap continue return None def", "for shapes ''' value = max(0,value) for textframe in pplib.iterate_shape_textframes(shapes):", "''' Get attr for shapes ''' for textframe in pplib.iterate_shape_textframes(shapes):", "par_format): if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: if (self.attr ==", "cls.recent_symbols.append(item) except ValueError: cls.recent_symbols.append(item) bkt.settings[\"bkt.symbols.recent_symbols\"] = cls.recent_symbols @classmethod def switch_unicode_font(cls,", "and add to beginning cls.recent_symbols.remove(item) cls.recent_symbols.append(item) except ValueError: cls.recent_symbols.append(item) bkt.settings[\"bkt.symbols.recent_symbols\"]", "= 0 for ldef in length_definition: l += self.length_from_definition(ldef, reference)", "width = height*16./9 img = Drawing.Bitmap(width, height) g = Drawing.Graphics.FromImage(img)", "None) #insert unicode characters as symbol with special font (e.g.", "ref_frame[1] width = self.length_from_definition(pos_definition[2], ref_frame[2]) height = self.length_from_definition(pos_definition[3], ref_frame[3]) return", "''' reposition shapes according of settings in clicked element '''", "bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_bitmap(cls): return (cls.convert_into_bitmap or bkt.get_key_state(bkt.KeyCodes.CTRL))", "0, 1./3, 1], 'CONTENT'], [u\"1/3 Rechts\", [2./3, 0, 1./3, 1],", "continue return None def get_attr_from_textframe(self, textframe): return getattr(textframe, self.attr) ###", "\"Oben-mitte\", item_supertip.format(\"oben-mitte\")), (\"fix_locpin_tr\", \"Oben-rechts\", item_supertip.format(\"oben-rechts\")), (\"fix_locpin_ml\", \"Mitte-links\", item_supertip.format(\"mitte-links\")), (\"fix_locpin_mm\", \"Mitte-mitte\",", "self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: if (self.attr == \"SpaceBefore\" and", 
"#font name # shape.TextFrame.TextRange.Text = item[1] #symbol text if PPTSymbolsSettings.get_convert_into_shape():", "return (cls.convert_into_bitmap or bkt.get_key_state(bkt.KeyCodes.CTRL)) and not bkt.get_key_state(bkt.KeyCodes.SHIFT) class PPTSymbolsGallery(bkt.ribbon.SymbolsGallery): @property", "shape.TextFrame.TextRange.Font.Name = item[0] #font name # shape.TextFrame.TextRange.Text = item[1] #symbol", "char_number = ord(item[1]) #ord does not work for higher level", "bkt.get_key_state(bkt.KeyCodes.CTRL) or bkt.get_key_state(bkt.KeyCodes.SHIFT) if selection.Type == 3 and not shift_or_ctrl:", "item) def create_symbol_shape(self, slide, item): shape = slide.shapes.addTextbox( #office.MsoAutoShapeType.msoShapeRectangle.value__, 1,", "= context.presentation.PageSetup ref_left,ref_top = 0, 0 ref_width,ref_height = page_setup.SlideWidth, page_setup.SlideHeight", "ref_frame[3]) + ref_frame[1] width = self.length_from_definition(pos_definition[2], ref_frame[2]) height = self.length_from_definition(pos_definition[3],", "target area according to settings in the specified item '''", "Drawing.Graphics.FromImage(img) # reference size if reference == 'CONTENT': v_offset =", "item): if item[0] or PPTSymbolsSettings.unicode_font is not None: #font name", "item[1] #symbol text if PPTSymbolsSettings.get_convert_into_shape(): #convert into shape try: orig_fontsize", "examples: MarginTop, MarginBottom, MarginLeft, MarginRight ''' #self.attr is automatically set", "os.remove(tmpfile) class PPTSymbolsGalleryRecent(PPTSymbolsGallery): @property def symbols(self): return PPTSymbolsSettings.recent_symbols @symbols.setter def", "else: return context.python_addin.load_image(self.items[index][0]) class PositionGallery(bkt.ribbon.Gallery): # items: [label, position, reference]", "height = self.length_from_definition(pos_definition[3], ref_frame[3]) return left, top, width, height def", "return 10 ## userdefined area def set_userdefined_area(self, presentation, shapes): if", "= dict( 
size_string = '-###', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) if", "dict( # get_enabled=bkt.apps.ppt_shapes_or_text_selected, columns=\"3\", item_height=\"24\", item_width=\"24\", show_item_label=False, on_action_indexed = bkt.Callback(self.locpin_on_action_indexed),", "### Instance initialization attr = 'MarginTop' def __init__(self, **kwargs): '''", "self.attr != \"FirstLineIndent\": #FirstLineIndent can be negative! value = max(0,value)", "be negative! value = max(0,value) if selection.Type == 3: #", "ist anschließend über die Gallery wählbar und wird dauerhaft in", "textrange, item): if item[0] or PPTSymbolsSettings.unicode_font is not None: #font", "til f0ff. raise TypeError(\"character number to large for InsertSymbol\") #fallback", "== -1: # self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) def create_symbol_shape(self, slide, item): shape", "**kwargs ) def on_action_indexed(self, selected_item, index, context, **kwargs): ''' reposition", "in pplib.iterate_shape_textframes(shapes): try: value = self.get_attr_from_textrange(textframe.TextRange) except: # produces error", "self.init_userdefined_area_item(presentation) return len(self.items) # def get_enabled(self, shapes): # return True", "the same value as in the first paragraph textrange.ParagraphFormat.LineRuleWithin =", "in [int, float, long]: if length_definition < 0: # negative", "if len(shapes) == 1: pplib.ContentArea.define_contentarea(presentation, shapes[0]) else: frame = pplib.BoundingFrame.from_shapes(shapes)", "bkt.Callback(self.locpin_on_action_indexed), get_selected_item_index = bkt.Callback(lambda: self.locpin.index), get_item_count = bkt.Callback(lambda: len(self.items)), get_item_label", "reference - self.length_from_definition(-length_definition, reference) elif length_definition <= 1: # percentage", "= pressed bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap 
@classmethod def", "\"SpaceAfter\" and par_format.LineRuleAfter == 0) or (self.attr == \"SpaceWithin\" and", "my_kwargs[\"big_step\"] = 0.25 my_kwargs[\"small_step\"] = 0.125 my_kwargs[\"rounding_factor\"] = 0.125 my_kwargs.update(kwargs)", "not None except: return False def get_index_as_button(self, index): return bkt.ribbon.Button(", "self.items[index][2]), # children = [ # Item(image=gal_item[0], screentip=gal_item[1], supertip=gal_item[2]) #", "convert = 'pt_to_cm', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) my_kwargs.update(kwargs) super(TextframeSpinnerBox, self).__init__(**my_kwargs)", "reference = item[2] #self.change_position(selection, shapes, item[1]) # reference size if", "set through RibbonControl attribute handling self.fallback_value = 0 my_kwargs =", "pass return value return None def get_attr_from_textrange(self, textrange): return self._get_attr(textrange.ParagraphFormat)", "textframe: self.insert_symbol_into_text(textframe.TextRange, item)) for textframe in pplib.iterate_shape_textframes(shapes): self.insert_symbol_into_text(textframe.TextRange, item) #", "super(LocpinGallery, self).__init__(**my_kwargs) def locpin_on_action_indexed(self, selected_item, index): self.locpin.index = index def", "certain chart types, e.g. 
Treemap continue try: if int(value) ==", "not shift_or_ctrl: #shapes selected self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection), item) else: #convert into shape", "self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection), item) else: #convert into shape or bitmap if PPTSymbolsSettings.get_convert_into_bitmap():", ".5, 0, .5, 1], 'CONTENT'], [u\"1/3 Links\", [ 0, 0,", "''' Set attr for shapes ''' value = max(0,value) for", "### def get_attr_from_shapes(self, shapes, selection): if selection.Type == 3: #", "# children = [ # Item(image=gal_item[0], screentip=gal_item[1], supertip=gal_item[2]) # for", "def get_text(self, shapes, selection): value = self.get_attr_from_shapes(shapes, selection) if value", "name is given, then insert as symbol font = item[0]", "it to '+mn-..' char_inserted.Font.NameFarEast = \"+mn-ea\" char_inserted.Font.NameComplexScript = \"+mn-cs\" char_inserted.Font.Name", "1], 'CONTENT'], [u\"1/3 Rechts\", [2./3, 0, 1./3, 1], 'CONTENT'], [u\"1/6", "textrange, e.g. selection within a chart return self._get_attr(selection.TextRange2.ParagraphFormat) except: return", "= Drawing.ColorTranslator.FromHtml('#ffdd0000') brush = Drawing.SolidBrush(color) g.FillRectangle(brush, Drawing.Rectangle(round(left),round(top), round(fill_width), round(fill_height))) color", "get_convert_into_shape(cls): return (cls.convert_into_shape or bkt.get_key_state(bkt.KeyCodes.SHIFT)) and not bkt.get_key_state(bkt.KeyCodes.CTRL) @classmethod def", "anymore. 
Also the default ppt symbol-picker only shows unicode chars", "festlegen\", supertip=\"Der benutzerdefinierte Bereich wird anhand des gewählten Shapes festgelegt.", "my_kwargs[\"round_pt\"] = True else: my_kwargs[\"round_cm\"] = True my_kwargs[\"convert\"] = \"pt_to_cm\"", "item = self.items[index] position = item[1] reference = item[2] #self.change_position(selection,", "is selected return self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat) except: try: # produces error if", "== \"SpaceBefore\" and par_format.LineRuleBefore == 0) or (self.attr == \"SpaceAfter\"", "cls.convert_into_bitmap @classmethod def get_convert_into_bitmap(cls): return (cls.convert_into_bitmap or bkt.get_key_state(bkt.KeyCodes.CTRL)) and not", "except: return False def get_index_as_button(self, index): return bkt.ribbon.Button( id=\"{}_button_{}\".format(self.id, index),", "not None: #font name is given, then insert as symbol", "def on_action_indexed(self, selected_item, index, context, selection, **kwargs): ''' create numberd", "TypeError if char_number > 61695: #for higher numbers (f0ff works,", "values specify distance 'from right' return reference - self.length_from_definition(-length_definition, reference)", "@property def fallback_font(self): return PPTSymbolsSettings.unicode_font or bkt.ribbon.SymbolsGallery.fallback_font def on_action_indexed(self, selected_item,", "'CONTENT': ref_left,ref_top,ref_width,ref_height = pplib.slide_content_size(context.slide) else: # SLIDE / ABS page_setup", "MarginRight ''' #self.attr is automatically set through RibbonControl attribute handling", "def set_attr_for_shapes(self, shapes, selection, value): if self.attr != \"FirstLineIndent\": #FirstLineIndent", "] my_kwargs = dict( # get_enabled=bkt.apps.ppt_shapes_or_text_selected, columns=\"3\", item_height=\"24\", item_width=\"24\", show_item_label=False,", "convert newly inserted symbols into bitmap picture unicode_font = bkt.settings.get(\"bkt.symbols.unicode_font\",", "newly inserted 
symbols into bitmap picture unicode_font = bkt.settings.get(\"bkt.symbols.unicode_font\", None)", "allow [150, 50%] l = 0 for ldef in length_definition:", "else: frame = pplib.BoundingFrame.from_shapes(shapes) pplib.ContentArea.define_contentarea(presentation, frame) self.init_userdefined_area_item(presentation) def init_userdefined_area_item(self, presentation):", "1: pplib.ContentArea.define_contentarea(presentation, shapes[0]) else: frame = pplib.BoundingFrame.from_shapes(shapes) pplib.ContentArea.define_contentarea(presentation, frame) self.init_userdefined_area_item(presentation)", "callbacks ### def get_text(self, shapes, selection): value = self.get_attr_from_shapes(shapes, selection)", "-1: # self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) def create_symbol_shape(self, slide, item): shape =", "and not shift_or_ctrl: #text selected selection.TextRange2.Text = \"\" #remove selected", "item ''' # retrieve item-settings item = self.items[index] return self.create_image(item[1],", "index): try: return self.symbols[index][2] except: return \"Zuletzt verwendet: Undefined\" def", "fill_height = fill_height/ref_height * height color = Drawing.ColorTranslator.FromHtml('#ffdd0000') brush =", "get_item_count(self, presentation): self.init_userdefined_area_item(presentation) return len(self.items) # def get_enabled(self, shapes): #", "define pen/brush height = 40 width = height*16./9 img =", "return length_definition else: return 10 ## userdefined area def set_userdefined_area(self,", "pplib.ContentArea.isset_contentarea(presentation): left, top, width, height = pplib.ContentArea.read_contentarea(presentation) if len(self.items) ==", "pass def get_item_image(self, index): try: return super(PPTSymbolsGalleryRecent, self).get_item_image(index) except: return", "textframe): return getattr(textframe, self.attr) ### Setter methods ### def set_attr_for_shapes(self,", "bkt.Callback(lambda: self.locpin.index), get_item_count = bkt.Callback(lambda: 
len(self.items)), get_item_label = bkt.Callback(lambda index:", "3 self.small_step = 1 self.round_at = 0 else: self.huge_step =", "= height*16./9 img = Drawing.Bitmap(width, height) g = Drawing.Graphics.FromImage(img) #", "# Item(image=gal_item[0], screentip=gal_item[1], supertip=gal_item[2]) # for gal_item in self.items #", "= dotnet.import_drawing() from . import helpers as pplib class TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox):", "0, .5, 1], 'CONTENT'], [u\"1/3 Links\", [ 0, 0, 1./3,", "self.insert_symbol_into_text(shape.TextFrame2.TextRange, item) # if item[0]: # shape.TextFrame.TextRange.Font.Name = item[0] #font", "= self.length_from_definition(pos_definition[2], ref_frame[2]) height = self.length_from_definition(pos_definition[3], ref_frame[3]) return left, top,", "or self.fallback_font img = bkt.ribbon.SymbolsGallery.create_symbol_image(font, item[1], 400, None) tmpfile =", "# SLIDE / ABS ref_width,ref_height = presentation.PageSetup.SlideWidth, presentation.PageSetup.SlideHeight left,top,fill_width,fill_height =", "1: # percentage values return reference * length_definition else: #", "values and correctly show icon is setting it to '+mn-..'", "bkt.get_enabled_auto ) ], **kwargs ) def on_action_indexed(self, selected_item, index, context,", "set the same value as in the first paragraph textrange.ParagraphFormat.LineRuleWithin", "= self.items[index] # return \"%s\" % getattr(NumberedShapes, 'label_' + item['label'])[index%self.columns]", "methods ### def set_attr_for_shapes(self, shapes, selection, value): ''' Set attr", "textframe in pplib.iterate_shape_textframes(shapes): try: return self.get_attr_from_textframe(textframe) except: # produces error", "textframe in pplib.iterate_shape_textframes(shapes): self.insert_symbol_into_text(textframe.TextRange, item) # for shape in shapes:", "attr = 'MarginTop' def __init__(self, **kwargs): ''' attr examples: MarginTop,", "height-1)) return img def rect_from_definition(self, pos_definition, 
ref_frame=[0,0,640,480]): left = self.length_from_definition(pos_definition[0],", "class ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr = 'SpaceBefore' def __init__(self,", "= bkt.settings.get(\"bkt.symbols.convert_into_bitmap\", False) #always convert newly inserted symbols into bitmap", "True my_kwargs[\"convert\"] = \"pt_to_cm\" if self.attr in [\"LeftIndent\", \"FirstLineIndent\"]: my_kwargs[\"big_step\"]", "value = max(0,value) if selection.Type == 3: # text selected", "return self._get_attr(selection.TextRange2.ParagraphFormat) except: return None else: # shapes selected for", "or cls.convert_into_bitmap) @classmethod def switch_convert_into_text(cls, pressed): cls.convert_into_shape = False cls.convert_into_bitmap", "= 3 self.small_step = 1 self.round_at = 0 else: self.huge_step", "shapes ''' for textframe in pplib.iterate_shape_textframes(shapes): try: return self.get_attr_from_textframe(textframe) except:", "work anymore. Also the default ppt symbol-picker only shows unicode", "get_enabled = bkt.apps.ppt_selection_contains_textframe, ) my_kwargs.update(kwargs) super(TextframeSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box", "are converted according to reference items = [ [u\"<NAME>\", [", "None def get_attr_from_textframe(self, textframe): return getattr(textframe, self.attr) ### Setter methods", "== 0) or (self.attr == \"SpaceAfter\" and par_format.LineRuleAfter == 0)", "= bkt.Callback(lambda index: self.items[index][1]), get_item_supertip = bkt.Callback(lambda index: self.items[index][2]), #", "= item[2] #self.change_position(selection, shapes, item[1]) # reference size if reference", "dotnet Drawing = dotnet.import_drawing() from . 
import helpers as pplib", "PPTSymbolsSettings.add_to_recent(item) def insert_symbol_into_text(self, textrange, item): if item[0] or PPTSymbolsSettings.unicode_font is", "deque(bkt.settings.get(\"bkt.symbols.recent_symbols\", []), maxlen=3) convert_into_shape = bkt.settings.get(\"bkt.symbols.convert_into_shape\", True) #always convert newly", "through RibbonControl attribute handling self.fallback_value = 0 my_kwargs = dict(", "or bitmap if PPTSymbolsSettings.get_convert_into_bitmap(): self.create_symbol_bitmap(selection.SlideRange(1), item) else: self.create_symbol_shape(selection.SlideRange(1), item) def", "try: return super(PPTSymbolsGalleryRecent, self).get_item_image(index) except: return super(PPTSymbolsGalleryRecent, self).create_symbol_image(\"Arial\", \"?\") def", "Prästentation vorgehalten.\", on_action=bkt.Callback(self.set_userdefined_area), get_enabled = bkt.get_enabled_auto ) ], **kwargs )", "def get_convert_into_shape(cls): return (cls.convert_into_shape or bkt.get_key_state(bkt.KeyCodes.SHIFT)) and not bkt.get_key_state(bkt.KeyCodes.CTRL) @classmethod", "reference, presentation): # create bitmap, define pen/brush height = 40", "convert_into_shape = bkt.settings.get(\"bkt.symbols.convert_into_shape\", True) #always convert newly inserted symbols into", "eine Standardposition.\", children=[ bkt.ribbon.Button( label=\"Benutzerdef. 
Bereich festlegen\", supertip=\"Der benutzerdefinierte Bereich", "item = self.items[index] return 'Positionierung: ' + item[0] def get_item_supertip(self,", "frame = pplib.BoundingFrame.from_rect(left, top, width, height) if 'on_position_change' in self._callbacks:", "= bkt.Callback(lambda: len(self.items)), get_item_label = bkt.Callback(lambda index: self.items[index][1]), get_item_image =", "fill_height/ref_height * height color = Drawing.ColorTranslator.FromHtml('#ffdd0000') brush = Drawing.SolidBrush(color) g.FillRectangle(brush,", "return super(PPTSymbolsGalleryRecent, self).create_symbol_image(\"Arial\", \"?\") def button_get_label(self, index): try: return self.symbols[index][2]", "1], 'CONTENT'], [u\"1/2 Links\", [ 0, 0, .5, 1], 'CONTENT'],", "### Setter methods ### def set_attr_for_shapes(self, shapes, selection, value): if", "SLIDE / ABS ref_width,ref_height = presentation.PageSetup.SlideWidth, presentation.PageSetup.SlideHeight left,top,fill_width,fill_height = self.rect_from_definition(position,", "200, 200) #FileName, LinkToFile, SaveWithDocument, Left, Top shape.select() os.remove(tmpfile) class", "'CONTENT'] ] def __init__(self, positions=None, label=\"Standardpositionen\", columns=3, **kwargs): self.items =", "(\"fix_locpin_mm\", \"Mitte-mitte\", item_supertip.format(\"mitte-mitte\")), (\"fix_locpin_mr\", \"Mitte-rechts\", item_supertip.format(\"mitte-rechts\")), (\"fix_locpin_bl\", \"Unten-links\", item_supertip.format(\"unten-links\")), (\"fix_locpin_bm\",", "shapes): if len(shapes) == 1: pplib.ContentArea.define_contentarea(presentation, shapes[0]) else: frame =", "= cls.unicode_font @classmethod def convert_into_text(cls): return not (cls.convert_into_shape or cls.convert_into_bitmap)", "= font #font name return char_inserted else: return textrange.InsertAfter(item[1]) #append", "die Gallery wählbar und wird dauerhaft in der aktuellen Prästentation", "0 for ldef in length_definition: l += self.length_from_definition(ldef, reference) return", 
"InsertSymbol\") #fallback to InsertAfter placeholder_char = textrange.InsertAfter(\"X\") #append placeholder symbol", "page_setup.SlideWidth, page_setup.SlideHeight # target size left,top,width,height = self.rect_from_definition(position, ref_frame=[ref_left,ref_top,ref_width, ref_height])", "= cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_shape(cls): return (cls.convert_into_shape", ") if not \"image\" in kwargs and not \"image_mso\" in", "shape.TextFrame2.MarginTop = 0 shape.TextFrame2.MarginLeft = 0 shape.TextFrame2.MarginRight = 0 self.insert_symbol_into_text(shape.TextFrame2.TextRange,", "cls.recent_symbols.remove(item) cls.recent_symbols.append(item) except ValueError: cls.recent_symbols.append(item) bkt.settings[\"bkt.symbols.recent_symbols\"] = cls.recent_symbols @classmethod def", "self.fallback_value = 0 my_kwargs = dict( size_string = '-###', get_enabled", "\"Zuletzt verwendet: Undefined\" def button_get_visible(self, index): try: return self.symbols[index] is", "= (height*4)/5 left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,v_offset,width, v_ref]) else: # SLIDE", "raise TypeError(\"character number to large for InsertSymbol\") #fallback to InsertAfter", "selection.Type == 3 and not shift_or_ctrl: #text selected selection.TextRange2.Text =", "bkt.Callback(self.locpin_get_image, context=True) my_kwargs.update(kwargs) super(LocpinGallery, self).__init__(**my_kwargs) def locpin_on_action_indexed(self, selected_item, index): self.locpin.index", "on_action_indexed = bkt.Callback(self.locpin_on_action_indexed), get_selected_item_index = bkt.Callback(lambda: self.locpin.index), get_item_count = bkt.Callback(lambda:", "cls.convert_into_bitmap @classmethod def switch_convert_into_shape(cls, pressed): cls.convert_into_shape = pressed cls.convert_into_bitmap =", "@author: fstallmann ''' from __future__ import absolute_import from collections 
import", "self.get_attr_from_textrange(textframe.TextRange) except: # produces error for certain chart types, e.g.", "values for each paragraph, so get value from first paragraph", "1], 'CONTENT'], [u\"2/3 Rechts\", [1./3, 0, 2./3, 1], 'CONTENT'], [u\"1/2", "values return reference * length_definition else: # absolute values return", "0, 0 ref_width,ref_height = page_setup.SlideWidth, page_setup.SlideHeight # target size left,top,width,height", "1 return getattr(par_format, self.attr) ### Setter methods ### def set_attr_for_shapes(self,", "and correctly show icon is setting it to '+mn-..' char_inserted.Font.NameFarEast", "Drawing.Rectangle(0,0, width-1, height-1)) return img def rect_from_definition(self, pos_definition, ref_frame=[0,0,640,480]): left", "ref_left,ref_top = 0, 0 ref_width,ref_height = page_setup.SlideWidth, page_setup.SlideHeight # target", "text first and then insert symbol self.insert_symbol_into_text(selection.TextRange2, item) elif PPTSymbolsSettings.convert_into_text()", "self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat) except: try: # produces error if there is no", "= \"\" #remove selected text first and then insert symbol", "f100 doesnt work) InsertSymbol does not work anymore. Also the", "return self._get_attr(textrange.ParagraphFormat) def _get_attr(self, par_format): if self.attr in [\"SpaceBefore\", \"SpaceAfter\",", "bzw. Fixierung bei Änderung {}\", **kwargs): self.locpin = locpin or", "not if InsertSymbol is used before (it remains the font", "top, width, height] # values can be absolute or percentage", "getattr(textframe, self.attr) ### Setter methods ### def set_attr_for_shapes(self, shapes, selection,", "retrieve item-settings item = self.items[index] return self.create_image(item[1], item[2], presentation) def", "level unicode, e.g. 
emojis, and throws TypeError if char_number >", "= ord(item[1]) #ord does not work for higher level unicode,", "''' attr examples: MarginTop, MarginBottom, MarginLeft, MarginRight ''' #self.attr is", "value): setattr(textframe, self.attr, value) class ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr", "= pplib.ContentArea.read_contentarea(presentation) if len(self.items) == 12: self.items.pop() self.items.append([u\"Benutzerdef. Bereich\", [left,", "def on_change(self, shapes, selection, value): self.set_attr_for_shapes(shapes, selection, value) ### Getter", "get_enabled(self, shapes): # return True # def get_item_label(self, index): #", "(\"fix_locpin_br\", \"Unten-rechts\", item_supertip.format(\"unten-rechts\")), ] my_kwargs = dict( # get_enabled=bkt.apps.ppt_shapes_or_text_selected, columns=\"3\",", "is setting it to '+mn-..' char_inserted.Font.NameFarEast = \"+mn-ea\" char_inserted.Font.NameComplexScript =", "get_item_image(self, index): try: return super(PPTSymbolsGalleryRecent, self).get_item_image(index) except: return super(PPTSymbolsGalleryRecent, self).create_symbol_image(\"Arial\",", "[ 0, 0, 1, 1./6], 'CONTENT'], [u\"1/6 Unten\", [ 0,", "item): try: #try to remove if already exists and add", "text if PPTSymbolsSettings.get_convert_into_shape(): #convert into shape try: orig_fontsize = shape.TextFrame2.TextRange.Font.Size", "0) or (self.attr == \"SpaceWithin\" and par_format.LineRuleWithin == 0): self.huge_step", "presentation) def get_item_screentip(self, index): # retrieve item-settings item = self.items[index]", "/ ABS # values are converted according to reference items", "and not bkt.get_key_state(bkt.KeyCodes.CTRL) @classmethod def switch_convert_into_bitmap(cls, pressed): cls.convert_into_shape = False", "set_attr_for_textframe(self, textframe, value): setattr(textframe, self.attr, value) class ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance", "#convert into shape try: orig_fontsize 
= shape.TextFrame2.TextRange.Font.Size shape.TextFrame2.TextRange.Font.Size = 60", "index def locpin_get_image(self, context, index=None): if index is None: return", "for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textframe(textframe, value) def set_attr_for_textframe(self, textframe, value):", "# create bitmap, define pen/brush height = 40 width =", "the symbol). only way to replace these values and correctly", "### def get_attr_from_shapes(self, shapes, selection): ''' Get attr for shapes", "selected return self._get_attr(selection.TextRange2.Paragraphs(1,1).ParagraphFormat) except: try: # produces error if there", "textrange.InsertAfter(item[1]) #append symbol text # if item[0]: # char_inserted.Font.Name =", "None except: return False def get_index_as_button(self, index): return bkt.ribbon.Button( id=\"{}_button_{}\".format(self.id,", "Änderung {}\", **kwargs): self.locpin = locpin or pplib.GlobalLocPin self.items =", "performance check first if tag exists at all if pplib.ContentArea.isset_contentarea(presentation):", "with target area according to settings in the specified item", "for shapes ''' for textframe in pplib.iterate_shape_textframes(shapes): try: return self.get_attr_from_textframe(textframe)", "#always convert newly inserted symbols into shapes convert_into_bitmap = bkt.settings.get(\"bkt.symbols.convert_into_bitmap\",", "_add_to_recent(self, item): PPTSymbolsSettings.add_to_recent(item) def insert_symbol_into_text(self, textrange, item): if item[0] or", "value def on_change(self, shapes, selection, value): self.set_attr_for_shapes(shapes, selection, value) ###", "Setter methods ### def set_attr_for_shapes(self, shapes, selection, value): ''' Set", "textrange instead of textframe! 
if self.attr == \"SpaceBefore\" and textrange.ParagraphFormat.LineRuleBefore", "fill_width = fill_width /ref_width * width top = top /ref_height", "# def get_item_label(self, index): # item = self.items[index] # return", "remove if already exists and add to beginning cls.recent_symbols.remove(item) cls.recent_symbols.append(item)", "symbols into shapes convert_into_bitmap = bkt.settings.get(\"bkt.symbols.convert_into_bitmap\", False) #always convert newly", "def set_attr_for_textrange(self, textrange, value): #using textrange instead of textframe! if", "@classmethod def convert_into_text(cls): return not (cls.convert_into_shape or cls.convert_into_bitmap) @classmethod def", "index): return bkt.ribbon.Button( id=\"{}_button_{}\".format(self.id, index), get_label=bkt.Callback(lambda: self.button_get_label(index)), on_action=bkt.Callback(lambda context, selection:", "self.length_from_definition(pos_definition[0], ref_frame[2]) + ref_frame[0] top = self.length_from_definition(pos_definition[1], ref_frame[3]) + ref_frame[1]", "cls.unicode_font = font #if font else SymbolsGallery.fallback_font bkt.settings[\"bkt.symbols.unicode_font\"] = cls.unicode_font", "to beginning cls.recent_symbols.remove(item) cls.recent_symbols.append(item) except ValueError: cls.recent_symbols.append(item) bkt.settings[\"bkt.symbols.recent_symbols\"] = cls.recent_symbols", "= index def locpin_get_image(self, context, index=None): if index is None:", "False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_shape(cls):", "value) ### Getter Methods ### def get_attr_from_shapes(self, shapes, selection): '''", "e.g. 
selection within a chart return self._get_attr(selection.TextRange2.ParagraphFormat) except: return None", "symbol text # if item[0]: # char_inserted.Font.Name = item[0] #font", "return PPTSymbolsSettings.recent_symbols @symbols.setter def symbols(self, value): pass def get_item_image(self, index):", "#FileName, LinkToFile, SaveWithDocument, Left, Top shape.select() os.remove(tmpfile) class PPTSymbolsGalleryRecent(PPTSymbolsGallery): @property", "orig_fontsize except: shape.select() else: new_shape.select() else: shape.select() def create_symbol_bitmap(self, slide,", "negative values specify distance 'from right' return reference - self.length_from_definition(-length_definition,", "in the first paragraph textrange.ParagraphFormat.LineRuleAfter = textrange.Paragraphs(1,1).ParagraphFormat.LineRuleAfter if self.attr ==", "= textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore if self.attr == \"SpaceAfter\" and textrange.ParagraphFormat.LineRuleAfter == -2:", "''' item = self.items[index] position = item[1] reference = item[2]", "works, f100 doesnt work) InsertSymbol does not work anymore. Also", "page_setup.SlideHeight # target size left,top,width,height = self.rect_from_definition(position, ref_frame=[ref_left,ref_top,ref_width, ref_height]) frame", "into shape or bitmap if PPTSymbolsSettings.get_convert_into_bitmap(): self.create_symbol_bitmap(selection.SlideRange(1), item) else: self.create_symbol_shape(selection.SlideRange(1),", "return bkt.ribbon.Button( id=\"{}_button_{}\".format(self.id, index), get_label=bkt.Callback(lambda: self.button_get_label(index)), on_action=bkt.Callback(lambda context, selection: self.on_action_indexed(None,", "get_item_supertip(self, index): return 'Verwende angezeigten Position/Größe.' 
def create_image(self, position, reference,", "selected selection.TextRange2.Text = \"\" #remove selected text first and then", "(cls.convert_into_shape or cls.convert_into_bitmap) @classmethod def switch_convert_into_text(cls, pressed): cls.convert_into_shape = False", "index): return 'Verwende angezeigten Position/Größe.' def create_image(self, position, reference, presentation):", "no textframe detected return None elif int(value) == -2147483648: #replace", "coding: utf-8 -*- ''' Created on 02.11.2017 @author: fstallmann '''", "color = Drawing.ColorTranslator.FromHtml('#ff999999') pen = Drawing.Pen(color,1) g.DrawRectangle(pen, Drawing.Rectangle(0,0, width-1, height/5-1))", "methods ### def set_attr_for_shapes(self, shapes, selection, value): if self.attr !=", "PPTSymbolsSettings.unicode_font is not None: #font name is given, then insert", "beginning cls.recent_symbols.remove(item) cls.recent_symbols.append(item) except ValueError: cls.recent_symbols.append(item) bkt.settings[\"bkt.symbols.recent_symbols\"] = cls.recent_symbols @classmethod", "aktuellen Prästentation vorgehalten.\", on_action=bkt.Callback(self.set_userdefined_area), get_enabled = bkt.get_enabled_auto ) ], **kwargs", "@classmethod def get_convert_into_bitmap(cls): return (cls.convert_into_bitmap or bkt.get_key_state(bkt.KeyCodes.CTRL)) and not bkt.get_key_state(bkt.KeyCodes.SHIFT)", "2./3, 1], 'CONTENT'], [u\"2/3 Rechts\", [1./3, 0, 2./3, 1], 'CONTENT'],", "\"SpaceWithin\"]: my_kwargs[\"round_pt\"] = True else: my_kwargs[\"round_cm\"] = True my_kwargs[\"convert\"] =", "selection.Type == 3: # text selected try: # produces error", "#if values differ, set the same value as in the", "textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textframe(textframe, value) def set_attr_for_textframe(self, textframe, value): setattr(textframe,", "1./3, 1], 'CONTENT'], [u\"1/6 Oben\", [ 0, 0, 1, 1./6],", "chars til f0ff. 
raise TypeError(\"character number to large for InsertSymbol\")", "add to beginning cls.recent_symbols.remove(item) cls.recent_symbols.append(item) except ValueError: cls.recent_symbols.append(item) bkt.settings[\"bkt.symbols.recent_symbols\"] =", "% getattr(NumberedShapes, 'label_' + item['label'])[index%self.columns] def get_item_image(self, index, presentation): '''", "'CONTENT'], [u\"1/3 Links\", [ 0, 0, 1./3, 1], 'CONTENT'], [u\"1/3", "item): PPTSymbolsSettings.add_to_recent(item) def insert_symbol_into_text(self, textrange, item): if item[0] or PPTSymbolsSettings.unicode_font", "#always convert newly inserted symbols into bitmap picture unicode_font =", "pressed bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"] = cls.convert_into_bitmap @classmethod def get_convert_into_bitmap(cls):", "new_shape.select() else: shape.select() def create_symbol_bitmap(self, slide, item): import tempfile, os", "newly inserted symbols into shapes convert_into_bitmap = bkt.settings.get(\"bkt.symbols.convert_into_bitmap\", False) #always", "slide.shapes.AddPicture(tmpfile, 0, -1, 200, 200) #FileName, LinkToFile, SaveWithDocument, Left, Top", "[]), maxlen=3) convert_into_shape = bkt.settings.get(\"bkt.symbols.convert_into_shape\", True) #always convert newly inserted", "# return \"%s\" % getattr(NumberedShapes, 'label_' + item['label'])[index%self.columns] def get_item_image(self,", "work for higher level unicode, e.g. 
emojis, and throws TypeError", "if PPTSymbolsSettings.get_convert_into_shape(): #convert into shape try: orig_fontsize = shape.TextFrame2.TextRange.Font.Size shape.TextFrame2.TextRange.Font.Size", "self.length_from_definition(pos_definition[1], ref_frame[3]) + ref_frame[1] width = self.length_from_definition(pos_definition[2], ref_frame[2]) height =", "same as InsertAfter return placeholder_char.InsertSymbol(font, char_number, -1) #symbol: FontName, CharNumber", "[u\"2/3 Rechts\", [1./3, 0, 2./3, 1], 'CONTENT'], [u\"1/2 Links\", [", "[ 0, 0, 2./3, 1], 'CONTENT'], [u\"2/3 Rechts\", [1./3, 0,", "percentage values return reference * length_definition else: # absolute values", "does not contain LeftIndent, etc. else: for textframe in pplib.iterate_shape_textframes(shapes):", "width, height def length_from_definition(self, length_definition, reference): if type(length_definition) == list:", "SymbolsGallery.fallback_font bkt.settings[\"bkt.symbols.unicode_font\"] = cls.unicode_font @classmethod def convert_into_text(cls): return not (cls.convert_into_shape", "kwargs: my_kwargs[\"get_image\"] = bkt.Callback(self.locpin_get_image, context=True) my_kwargs.update(kwargs) super(LocpinGallery, self).__init__(**my_kwargs) def locpin_on_action_indexed(self,", "True, convert = 'pt_to_cm', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) my_kwargs.update(kwargs) super(TextframeSpinnerBox,", "bkt.settings.get(\"bkt.symbols.convert_into_shape\", True) #always convert newly inserted symbols into shapes convert_into_bitmap", "shape try: orig_fontsize = shape.TextFrame2.TextRange.Font.Size shape.TextFrame2.TextRange.Font.Size = 60 shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible =", "if item[0]: # shape.TextFrame.TextRange.Font.Name = item[0] #font name # shape.TextFrame.TextRange.Text", "# reference size if reference == 'CONTENT': v_offset = height/5", "= positions or PositionGallery.items super(PositionGallery, self).__init__( label = label, columns", "* height 
fill_height = fill_height/ref_height * height color = Drawing.ColorTranslator.FromHtml('#ffdd0000')", "self.create_symbol_shape(selection.SlideRange(1), item) def _add_to_recent(self, item): PPTSymbolsSettings.add_to_recent(item) def insert_symbol_into_text(self, textrange, item):", "index): try: return super(PPTSymbolsGalleryRecent, self).get_item_image(index) except: return super(PPTSymbolsGalleryRecent, self).create_symbol_image(\"Arial\", \"?\")", "#so, NameFarEast and NameComplexScript should be writable, but they are", "return l elif type(length_definition) in [int, float, long]: if length_definition", "= '###', round_cm = True, convert = 'pt_to_cm', get_enabled =", "except TypeError: char_inserted = textrange.InsertAfter(item[1]) #append symbol text #so, NameFarEast", "len(self.items)), get_item_label = bkt.Callback(lambda index: self.items[index][1]), get_item_image = bkt.Callback(self.locpin_get_image, context=True),", "shapes): # return True # def get_item_label(self, index): # item", "### Getter Methods ### def get_attr_from_shapes(self, shapes, selection): ''' Get", "else: return 10 ## userdefined area def set_userdefined_area(self, presentation, shapes):", "Spinner Box callbacks ### def get_text(self, shapes, selection): value =", "0 my_kwargs = dict( size_string = '###', round_cm = True,", "Gallery wählbar und wird dauerhaft in der aktuellen Prästentation vorgehalten.\",", "each paragraph, so get value from first paragraph value =", ") class LocpinGallery(bkt.ribbon.Gallery): def __init__(self, locpin=None, item_supertip=\"Shape-Fixpunkt bzw. Fixierung bei", "use TextRange2 as TextRange does not contain LeftIndent, etc. 
else:", "presentation.PageSetup.SlideWidth, presentation.PageSetup.SlideHeight left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,0,ref_width, ref_height]) left = left", "[.25, 0, .5, 1], 'CONTENT'], [u\"1/2 Rechts\", [ .5, 0,", "size_string = '-###', get_enabled = bkt.apps.ppt_selection_contains_textframe, ) if self.attr in", "# -*- coding: utf-8 -*- ''' Created on 02.11.2017 @author:", "#office.MsoAutoShapeType.msoShapeRectangle.value__, 1, 100,100,200,200) shape.TextFrame2.WordWrap = 0 shape.TextFrame2.AutoSize = 1 #ppAutoSizeShapeToFitText", "length_definition, reference): if type(length_definition) == list: # allow [150, 50%]", "first and then insert symbol self.insert_symbol_into_text(selection.TextRange2, item) elif PPTSymbolsSettings.convert_into_text() and", "'MarginTop' def __init__(self, **kwargs): ''' attr examples: MarginTop, MarginBottom, MarginLeft,", "and textrange.ParagraphFormat.LineRuleBefore == -2: #if values differ, set the same", "#using textrange instead of textframe! if self.attr == \"SpaceBefore\" and", "Links\", [ 0, 0, 2./3, 1], 'CONTENT'], [u\"2/3 Rechts\", [1./3,", "TextRange does not contain LeftIndent, etc. 
else: for textframe in", "textrange.Paragraphs(1,1).ParagraphFormat.LineRuleBefore if self.attr == \"SpaceAfter\" and textrange.ParagraphFormat.LineRuleAfter == -2: #if", "in clicked element ''' item = self.symbols[index] self._add_to_recent(item) shift_or_ctrl =", "### def set_attr_for_shapes(self, shapes, selection, value): if self.attr != \"FirstLineIndent\":", "bkt.get_key_state(bkt.KeyCodes.SHIFT)) and not bkt.get_key_state(bkt.KeyCodes.CTRL) @classmethod def switch_convert_into_bitmap(cls, pressed): cls.convert_into_shape =", "# char_inserted.Font.Name = item[0] #font name def insert_symbol_into_shapes(self, shapes, item):", "self).create_symbol_image(\"Arial\", \"?\") def button_get_label(self, index): try: return self.symbols[index][2] except: return", "Rechts\", [ .5, 0, .5, 1], 'CONTENT'], [u\"1/3 Links\", [", "item) # if item[0]: # shape.TextFrame.TextRange.Font.Name = item[0] #font name", "shapes convert_into_bitmap = bkt.settings.get(\"bkt.symbols.convert_into_bitmap\", False) #always convert newly inserted symbols", "self._get_attr(selection.TextRange2.ParagraphFormat) except: return None else: # shapes selected for textframe", "if self.attr == \"SpaceBefore\" and textrange.ParagraphFormat.LineRuleBefore == -2: #if values", "PPTSymbolsSettings.get_convert_into_shape(): #convert into shape try: orig_fontsize = shape.TextFrame2.TextRange.Font.Size shape.TextFrame2.TextRange.Font.Size =", "if char_number > 61695: #for higher numbers (f0ff works, f100", "try: value = self.get_attr_from_textrange(textframe.TextRange) except: # produces error for certain", "= self.rect_from_definition(position, ref_frame=[0,0,ref_width, ref_height]) left = left /ref_width * width", "'Positionierung: ' + item[0] def get_item_supertip(self, index): return 'Verwende angezeigten", "def get_attr_from_shapes(self, shapes, selection): if selection.Type == 3: # text", "= bkt.apps.ppt_selection_contains_textframe, ) if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: 
my_kwargs[\"round_pt\"]", "self.items[index][1]), get_item_image = bkt.Callback(self.locpin_get_image, context=True), get_item_screentip = bkt.Callback(lambda index: self.items[index][1]),", "cls.convert_into_shape = False cls.convert_into_bitmap = False bkt.settings[\"bkt.symbols.convert_into_shape\"] = cls.convert_into_shape bkt.settings[\"bkt.symbols.convert_into_bitmap\"]", "symbol self.insert_symbol_into_text(selection.TextRange2, item) elif PPTSymbolsSettings.convert_into_text() and selection.Type == 2 and", "Mitte\", [.25, 0, .5, 1], 'CONTENT'], [u\"1/2 Rechts\", [ .5,", "'CONTENT'], [u\"1/6 Oben\", [ 0, 0, 1, 1./6], 'CONTENT'], [u\"1/6", "ref_frame[0] top = self.length_from_definition(pos_definition[1], ref_frame[3]) + ref_frame[1] width = self.length_from_definition(pos_definition[2],", "shapes, selection, value): if self.attr != \"FirstLineIndent\": #FirstLineIndent can be", "self.rect_from_definition(position, ref_frame=[0,v_offset,width, v_ref]) else: # SLIDE / ABS ref_width,ref_height =", "= left /ref_width * width fill_width = fill_width /ref_width *", "self.get_attr_from_textframe(textframe) except: # produces error for certain chart types, e.g.", "name return char_inserted else: return textrange.InsertAfter(item[1]) #append symbol text #", "from collections import deque import bkt from bkt import dotnet", "= bkt.Callback(lambda index: self.items[index][2]), # children = [ # Item(image=gal_item[0],", "item): #pplib.iterate_shape_textframes(shapes, lambda textframe: self.insert_symbol_into_text(textframe.TextRange, item)) for textframe in pplib.iterate_shape_textframes(shapes):", "= self.rect_from_definition(position, ref_frame=[ref_left,ref_top,ref_width, ref_height]) frame = pplib.BoundingFrame.from_rect(left, top, width, height)", "if self.attr == \"SpaceWithin\" and textrange.ParagraphFormat.LineRuleWithin == -2: #if values", "Instance initialization attr = 'SpaceBefore' def __init__(self, **kwargs): ''' attr", "slide.shapes.addTextbox( 
#office.MsoAutoShapeType.msoShapeRectangle.value__, 1, 100,100,200,200) shape.TextFrame2.WordWrap = 0 shape.TextFrame2.AutoSize = 1", "within a chart return self._get_attr(selection.TextRange2.ParagraphFormat) except: return None else: #", "error for certain chart types, e.g. Treemap continue try: if", "\"bkt-symbol.png\") img.Save(tmpfile, Drawing.Imaging.ImageFormat.Png) shape = slide.shapes.AddPicture(tmpfile, 0, -1, 200, 200)", "positions or PositionGallery.items super(PositionGallery, self).__init__( label = label, columns =", "_get_attr(self, par_format): if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: if (self.attr", "negative! value = max(0,value) if selection.Type == 3: # text", "= self.rect_from_definition(position, ref_frame=[0,v_offset,width, v_ref]) else: # SLIDE / ABS ref_width,ref_height", "= 0 shape.TextFrame2.MarginTop = 0 shape.TextFrame2.MarginLeft = 0 shape.TextFrame2.MarginRight =", "left, top, width, height def length_from_definition(self, length_definition, reference): if type(length_definition)", "PositionGallery.items super(PositionGallery, self).__init__( label = label, columns = columns, image_mso='PositionAnchoringGallery',", "as symbol with special font (e.g. 
Arial Unicode) @classmethod def", "with fallback value return self.fallback_value else: return value def on_change(self,", "width-1, height-1)) return img def rect_from_definition(self, pos_definition, ref_frame=[0,0,640,480]): left =", "value from first paragraph value = self._get_attr(textframe.TextRange.Paragraphs(1,1).ParagraphFormat) except: pass return", "self.set_attr_for_textrange(selection.TextRange2, value) #need to use TextRange2 as TextRange does not", "PPTSymbolsSettings(object): recent_symbols = deque(bkt.settings.get(\"bkt.symbols.recent_symbols\", []), maxlen=3) convert_into_shape = bkt.settings.get(\"bkt.symbols.convert_into_shape\", True)", "#remove selected text first and then insert symbol self.insert_symbol_into_text(selection.TextRange2, item)", "if not \"image\" in kwargs and not \"image_mso\" in kwargs:", "return True # def get_item_label(self, index): # item = self.items[index]", "columns=3, **kwargs): self.items = positions or PositionGallery.items super(PositionGallery, self).__init__( label", "l += self.length_from_definition(ldef, reference) return l elif type(length_definition) in [int,", "except: return None else: # shapes selected for textframe in", "<= 1: # percentage values return reference * length_definition else:", "context.presentation.PageSetup ref_left,ref_top = 0, 0 ref_width,ref_height = page_setup.SlideWidth, page_setup.SlideHeight #", "my_kwargs = dict( size_string = '-###', get_enabled = bkt.apps.ppt_selection_contains_textframe, )", "Left, Top shape.select() os.remove(tmpfile) class PPTSymbolsGalleryRecent(PPTSymbolsGallery): @property def symbols(self): return", "# text selected try: # produces error if no text", "get_item_supertip = bkt.Callback(lambda index: self.items[index][2]), # children = [ #", "length_definition < 0: # negative values specify distance 'from right'", "to performance check first if tag exists at all if", "[u\"1/2 Mitte\", [.25, 0, .5, 1], 'CONTENT'], [u\"1/2 Rechts\", [", "does not work for 
higher level unicode, e.g. emojis, and", "Bereich ist anschließend über die Gallery wählbar und wird dauerhaft", "my_kwargs.update(kwargs) super(TextframeSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks ### def get_text(self,", "self.create_symbol_bitmap(selection.SlideRange(1), item) else: self.create_symbol_shape(selection.SlideRange(1), item) def _add_to_recent(self, item): PPTSymbolsSettings.add_to_recent(item) def", "target size left,top,width,height = self.rect_from_definition(position, ref_frame=[ref_left,ref_top,ref_width, ref_height]) frame = pplib.BoundingFrame.from_rect(left,", "numberd shape according of settings in clicked element ''' item", "LineSpacing ''' #self.attr is automatically set through RibbonControl attribute handling", "value) class ParagraphFormatSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr = 'SpaceBefore' def", "symbol font = item[0] or self.fallback_font try: char_number = ord(item[1])", "Drawing = dotnet.import_drawing() from . 
import helpers as pplib class", "new_shape.TextFrame2.TextRange.Font.Size = orig_fontsize except: shape.select() else: new_shape.select() else: shape.select() def", "= 60 shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible = 0 new_shape = pplib.convert_text_into_shape(shape) new_shape.TextFrame2.TextRange.Font.Size =", "context, selection, **kwargs): ''' create numberd shape according of settings", "pplib.iterate_shape_textframes(shapes): self.insert_symbol_into_text(textframe.TextRange, item) # for shape in shapes: # if", "self.huge_step = 0.5 self.big_step = 0.2 self.small_step = 0.1 self.round_at", "large for InsertSymbol\") #fallback to InsertAfter placeholder_char = textrange.InsertAfter(\"X\") #append", "selected_item, index, context, selection, **kwargs): ''' create numberd shape according", "self.attr, value) class PPTSymbolsSettings(object): recent_symbols = deque(bkt.settings.get(\"bkt.symbols.recent_symbols\", []), maxlen=3) convert_into_shape", "char_number > 61695: #for higher numbers (f0ff works, f100 doesnt", "par_format.LineRuleWithin == 0): self.huge_step = 10 self.big_step = 3 self.small_step", "Box callbacks ### def get_text(self, shapes, selection): value = self.get_attr_from_shapes(shapes,", "bkt.Callback(lambda: len(self.items)), get_item_label = bkt.Callback(lambda index: self.items[index][1]), get_item_image = bkt.Callback(self.locpin_get_image,", "size if reference == 'CONTENT': ref_left,ref_top,ref_width,ref_height = pplib.slide_content_size(context.slide) else: #", "self.huge_step = 10 self.big_step = 3 self.small_step = 1 self.round_at", "can be negative! value = max(0,value) if selection.Type == 3:", "element ''' item = self.symbols[index] self._add_to_recent(item) shift_or_ctrl = bkt.get_key_state(bkt.KeyCodes.CTRL) or", "''' creates an item image with target area according to", "from bkt import dotnet Drawing = dotnet.import_drawing() from . 
import", "5./6, 1, 1./6], 'CONTENT'] ] def __init__(self, positions=None, label=\"Standardpositionen\", columns=3,", "**kwargs): self.locpin = locpin or pplib.GlobalLocPin self.items = [ (\"fix_locpin_tl\",", "= self.length_from_definition(pos_definition[3], ref_frame[3]) return left, top, width, height def length_from_definition(self,", "length_definition <= 1: # percentage values return reference * length_definition", "item[2] #self.change_position(selection, shapes, item[1]) # reference size if reference ==", "def button_get_label(self, index): try: return self.symbols[index][2] except: return \"Zuletzt verwendet:", "or self.fallback_font try: char_number = ord(item[1]) #ord does not work", "'CONTENT': v_offset = height/5 v_ref = (height*4)/5 left,top,fill_width,fill_height = self.rect_from_definition(position,", "shift_or_ctrl: #shapes selected self.insert_symbol_into_shapes(pplib.get_shapes_from_selection(selection), item) else: #convert into shape or", "textrange, value): #using textrange instead of textframe! 
if self.attr ==", "g = Drawing.Graphics.FromImage(img) # reference size if reference == 'CONTENT':", "top, width, height def length_from_definition(self, length_definition, reference): if type(length_definition) ==", "left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,0,ref_width, ref_height]) left = left /ref_width *", "self.on_action_indexed(None, index, context, selection)), get_image=bkt.Callback(lambda: self.get_item_image(index)), get_visible=bkt.Callback(lambda: self.button_get_visible(index)), ) class", "target_frame=frame, **kwargs) def get_item_count(self, presentation): self.init_userdefined_area_item(presentation) return len(self.items) # def", "#self.attr is automatically set through RibbonControl attribute handling self.fallback_value =", "(cls.convert_into_bitmap or bkt.get_key_state(bkt.KeyCodes.CTRL)) and not bkt.get_key_state(bkt.KeyCodes.SHIFT) class PPTSymbolsGallery(bkt.ribbon.SymbolsGallery): @property def", "reference) elif length_definition <= 1: # percentage values return reference", "brush = Drawing.SolidBrush(color) g.FillRectangle(brush, Drawing.Rectangle(round(left),round(top), round(fill_width), round(fill_height))) color = Drawing.ColorTranslator.FromHtml('#ff999999')", "[\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: if (self.attr == \"SpaceBefore\" and par_format.LineRuleBefore ==", "# return True # def get_item_label(self, index): # item =", "[ 0, 0, .5, 1], 'CONTENT'], [u\"1/2 Mitte\", [.25, 0,", "left,top,width,height = self.rect_from_definition(position, ref_frame=[ref_left,ref_top,ref_width, ref_height]) frame = pplib.BoundingFrame.from_rect(left, top, width,", "if self.attr != \"FirstLineIndent\": #FirstLineIndent can be negative! 
value =", ") if self.attr in [\"SpaceBefore\", \"SpaceAfter\", \"SpaceWithin\"]: my_kwargs[\"round_pt\"] = True", "v_ref = (height*4)/5 left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,v_offset,width, v_ref]) else: #", "item['label'])[index%self.columns] def get_item_image(self, index, presentation): ''' creates an item image", "reference] # position: [left, top, width, height] # values can", "PPTSymbolsGalleryRecent(PPTSymbolsGallery): @property def symbols(self): return PPTSymbolsSettings.recent_symbols @symbols.setter def symbols(self, value):", "right' return reference - self.length_from_definition(-length_definition, reference) elif length_definition <= 1:", "400, None) tmpfile = os.path.join(tempfile.gettempdir(), \"bkt-symbol.png\") img.Save(tmpfile, Drawing.Imaging.ImageFormat.Png) shape =", "except ValueError: cls.recent_symbols.append(item) bkt.settings[\"bkt.symbols.recent_symbols\"] = cls.recent_symbols @classmethod def switch_unicode_font(cls, font=None):", "self.fallback_font img = bkt.ribbon.SymbolsGallery.create_symbol_image(font, item[1], 400, None) tmpfile = os.path.join(tempfile.gettempdir(),", "higher level unicode, e.g. 
emojis, and throws TypeError if char_number", "shape.TextFrame2.AutoSize = 1 #ppAutoSizeShapeToFitText shape.TextFrame2.MarginBottom = 0 shape.TextFrame2.MarginTop = 0", "my_kwargs[\"small_step\"] = 0.125 my_kwargs[\"rounding_factor\"] = 0.125 my_kwargs.update(kwargs) super(ParagraphFormatSpinnerBox, self).__init__(**my_kwargs) ###", "max(0,value) for textframe in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textframe(textframe, value) def set_attr_for_textframe(self, textframe,", "try: # produces error if no text is selected return", "class PPTSymbolsGalleryRecent(PPTSymbolsGallery): @property def symbols(self): return PPTSymbolsSettings.recent_symbols @symbols.setter def symbols(self,", "init_userdefined_area_item(self, presentation): #due to performance check first if tag exists", "in kwargs and not \"image_mso\" in kwargs: my_kwargs[\"get_image\"] = bkt.Callback(self.locpin_get_image,", "### def set_attr_for_shapes(self, shapes, selection, value): ''' Set attr for", "[ 0, 0, 1./3, 1], 'CONTENT'], [u\"1/3 Mitte\", [1./3, 0,", "(\"fix_locpin_mr\", \"Mitte-rechts\", item_supertip.format(\"mitte-rechts\")), (\"fix_locpin_bl\", \"Unten-links\", item_supertip.format(\"unten-links\")), (\"fix_locpin_bm\", \"Unten-mitte\", item_supertip.format(\"unten-mitte\")), (\"fix_locpin_br\",", "= self.length_from_definition(pos_definition[1], ref_frame[3]) + ref_frame[1] width = self.length_from_definition(pos_definition[2], ref_frame[2]) height", "if tag exists at all if pplib.ContentArea.isset_contentarea(presentation): left, top, width,", "0, 1./3, 1], 'CONTENT'], [u\"1/3 Mitte\", [1./3, 0, 1./3, 1],", "== -2147483648: #replace large negative number (values differ between selected", "in pplib.iterate_shape_textframes(shapes): try: return self.get_attr_from_textframe(textframe) except: # produces error for", "for textframe in pplib.iterate_shape_textframes(shapes): self.insert_symbol_into_text(textframe.TextRange, item) # for shape in", "'on_position_change' in 
self._callbacks: if context: return context.invoke_callback(self._callbacks['on_position_change'], target_frame=frame, **kwargs) def", "Instance initialization attr = 'MarginTop' def __init__(self, **kwargs): ''' attr", "False def get_index_as_button(self, index): return bkt.ribbon.Button( id=\"{}_button_{}\".format(self.id, index), get_label=bkt.Callback(lambda: self.button_get_label(index)),", "self).__init__(**my_kwargs) ### Spinner Box callbacks ### def get_text(self, shapes, selection):", "== 0): self.huge_step = 10 self.big_step = 3 self.small_step =", "or bkt.get_key_state(bkt.KeyCodes.SHIFT)) and not bkt.get_key_state(bkt.KeyCodes.CTRL) @classmethod def switch_convert_into_bitmap(cls, pressed): cls.convert_into_shape", "= pplib.slide_content_size(context.slide) else: # SLIDE / ABS page_setup = context.presentation.PageSetup", "+ ref_frame[1] width = self.length_from_definition(pos_definition[2], ref_frame[2]) height = self.length_from_definition(pos_definition[3], ref_frame[3])", "char_inserted.Font.Name = font #font name return char_inserted else: return textrange.InsertAfter(item[1])", "def button_get_visible(self, index): try: return self.symbols[index] is not None except:", "get_item_label(self, index): # item = self.items[index] # return \"%s\" %", "left,top,fill_width,fill_height = self.rect_from_definition(position, ref_frame=[0,v_offset,width, v_ref]) else: # SLIDE / ABS", "(\"fix_locpin_tr\", \"Oben-rechts\", item_supertip.format(\"oben-rechts\")), (\"fix_locpin_ml\", \"Mitte-links\", item_supertip.format(\"mitte-links\")), (\"fix_locpin_mm\", \"Mitte-mitte\", item_supertip.format(\"mitte-mitte\")), (\"fix_locpin_mr\",", "except: try: # produces error if there is no textrange,", "else: return value def on_change(self, shapes, selection, value): self.set_attr_for_shapes(shapes, selection,", "' + item[0] def get_item_supertip(self, index): return 'Verwende angezeigten Position/Größe.'", "height = 40 width = height*16./9 img = Drawing.Bitmap(width, 
height)", "TypeError: char_inserted = textrange.InsertAfter(item[1]) #append symbol text #so, NameFarEast and", "self.rect_from_definition(position, ref_frame=[ref_left,ref_top,ref_width, ref_height]) frame = pplib.BoundingFrame.from_rect(left, top, width, height) if", "[left, top, width, height] # values can be absolute or", "def _add_to_recent(self, item): PPTSymbolsSettings.add_to_recent(item) def insert_symbol_into_text(self, textrange, item): if item[0]", "'CONTENT'], [u\"2/3 Links\", [ 0, 0, 2./3, 1], 'CONTENT'], [u\"2/3", "try: orig_fontsize = shape.TextFrame2.TextRange.Font.Size shape.TextFrame2.TextRange.Font.Size = 60 shape.TextFrame2.TextRange.ParagraphFormat.Bullet.Visible = 0", "according of settings in clicked element ''' item = self.symbols[index]", "selected for textframe in pplib.iterate_shape_textframes(shapes): try: value = self.get_attr_from_textrange(textframe.TextRange) except:", "length_definition else: # absolute values return length_definition else: return 10", "pplib.iterate_shape_textframes(shapes): self.set_attr_for_textframe(textframe, value) def set_attr_for_textframe(self, textframe, value): setattr(textframe, self.attr, value)", "bkt.Callback(lambda index: self.items[index][1]), get_item_image = bkt.Callback(self.locpin_get_image, context=True), get_item_screentip = bkt.Callback(lambda", "if context: return context.invoke_callback(self._callbacks['on_position_change'], target_frame=frame, **kwargs) def get_item_count(self, presentation): self.init_userdefined_area_item(presentation)", "+ item[0] def get_item_supertip(self, index): return 'Verwende angezeigten Position/Größe.' 
def", "= self.items[index] position = item[1] reference = item[2] #self.change_position(selection, shapes,", "import deque import bkt from bkt import dotnet Drawing =", "= item[1] #symbol text if PPTSymbolsSettings.get_convert_into_shape(): #convert into shape try:", "= [ (\"fix_locpin_tl\", \"Oben-links\", item_supertip.format(\"oben-links\")), (\"fix_locpin_tm\", \"Oben-mitte\", item_supertip.format(\"oben-mitte\")), (\"fix_locpin_tr\", \"Oben-rechts\",", "userdefined area def set_userdefined_area(self, presentation, shapes): if len(shapes) == 1:", "# shape.TextFrame.TextRange.Text = item[1] #symbol text if PPTSymbolsSettings.get_convert_into_shape(): #convert into", "shape.select() os.remove(tmpfile) class PPTSymbolsGalleryRecent(PPTSymbolsGallery): @property def symbols(self): return PPTSymbolsSettings.recent_symbols @symbols.setter", "**kwargs): ''' reposition shapes according of settings in clicked element", "return reference * length_definition else: # absolute values return length_definition", "wird dauerhaft in der aktuellen Prästentation vorgehalten.\", on_action=bkt.Callback(self.set_userdefined_area), get_enabled =", "pplib class TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr = 'MarginTop' def", "# for gal_item in self.items # ] ) if not", "in pplib.iterate_shape_textframes(shapes): self.set_attr_for_textframe(textframe, value) def set_attr_for_textframe(self, textframe, value): setattr(textframe, self.attr,", "my_kwargs[\"rounding_factor\"] = 0.125 my_kwargs.update(kwargs) super(ParagraphFormatSpinnerBox, self).__init__(**my_kwargs) ### Spinner Box callbacks", "according of settings in clicked element ''' item = self.items[index]", "try: return self.symbols[index][2] except: return \"Zuletzt verwendet: Undefined\" def button_get_visible(self,", "= cls.convert_into_bitmap @classmethod def get_convert_into_shape(cls): return (cls.convert_into_shape or bkt.get_key_state(bkt.KeyCodes.SHIFT)) and", 
"button_get_label(self, index): try: return self.symbols[index][2] except: return \"Zuletzt verwendet: Undefined\"", "get_enabled=bkt.apps.ppt_shapes_or_text_selected, columns=\"3\", item_height=\"24\", item_width=\"24\", show_item_label=False, on_action_indexed = bkt.Callback(self.locpin_on_action_indexed), get_selected_item_index =", "= 0.1 self.round_at = 1 return getattr(par_format, self.attr) ### Setter", "class TextframeSpinnerBox(bkt.ribbon.RoundingSpinnerBox): ### Instance initialization attr = 'MarginTop' def __init__(self,", "# if item[0]: # shape.TextFrame.TextRange.Font.Name = item[0] #font name #", "\"Unten-links\", item_supertip.format(\"unten-links\")), (\"fix_locpin_bm\", \"Unten-mitte\", item_supertip.format(\"unten-mitte\")), (\"fix_locpin_br\", \"Unten-rechts\", item_supertip.format(\"unten-rechts\")), ] my_kwargs", "#symbol text if PPTSymbolsSettings.get_convert_into_shape(): #convert into shape try: orig_fontsize =", "height*16./9 img = Drawing.Bitmap(width, height) g = Drawing.Graphics.FromImage(img) # reference" ]
[ ":param position: :param queue: \"\"\" return self(AbilityId.PATROL, target=position, queue=queue) def", "in self._type_data.attributes @property def is_biological(self) -> bool: \"\"\" Checks if", "space is available at maximum. \"\"\" return self._proto.cargo_space_max @property def", "Point3.from_proto(self._proto.pos) def distance_to(self, p: Union[Unit, Point2, Point3]) -> Union[int, float]:", "if the unit is revealed. \"\"\" return self._proto.cloak is IS_REVEALED", "returning from mineral field or vespene geyser to deliver resources", "\"\"\" Returns the tags of the units inside a Bunker,", "int: \"\"\" Returns the ideal harverster count for unit. 3", "tags of the units inside a Bunker, CommandCenter, PlanetaryFortress, Medivac,", "-> int: \"\"\" Returns the maximum amount of frames of", "return False return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (self.radius + target.radius", "'Mechanical', 'Psionic', 'Massive', 'Structure'. \"\"\" # TODO: Consider units with", "return self._proto.is_blip @property def is_powered(self) -> bool: \"\"\" Checks if", "queued up to seven patrol points. If the last point", "an ability on the target without checking ability cooldown (like", "geysers. \"\"\" return bool(self._proto.vespene_contents) @property def is_flying(self) -> bool: \"\"\"", "if unit.weapon_cooldown == 0: self.actions.append(unit.attack(target)) elif unit.weapon_cooldown < 0: self.actions.append(unit.move(closest_allied_unit_because_cant_attack))", "functions def has_buff(self, buff: BuffId) -> bool: \"\"\" Checks if", "return self.position.offset(Point2((-2.5, 0.5))) @property_mutable_cache def passengers(self) -> Set[Unit]: \"\"\" Returns", "or a gas extraction building. :param target: :param queue: \"\"\"", "Set[AbilityId]]) -> bool: \"\"\" Check if the unit is using", "typs are: 'Light', 'Armored', 'Biological', 'Mechanical', 'Psionic', 'Massive', 'Structure'. 
\"\"\"", "a geyser or mining base.\"\"\" return self._proto.assigned_harvesters @property def ideal_harvesters(self)", "self._type_data.has_minerals @property def is_vespene_geyser(self) -> bool: \"\"\" Checks if the", "target. :param target: :param bonus_distance: \"\"\" # TODO: Fix this", "made available through research (like HT storm). :param ability_id: :param", "self.is_using_ability(AbilityId.MOVE) @property_immutable_cache def is_attacking(self) -> bool: \"\"\" Checks if the", "-> Union[int, float]: \"\"\" Returns the movement speed of the", "\"\"\" if self.type_id == UNIT_BATTLECRUISER: return True if self._weapons: return", "the bot. NOTE: This means the bot has vision of", "Unit = None, queue: bool = False) -> UnitCommand: \"\"\"", "def shield_upgrade_level(self) -> int: \"\"\" Returns the upgrade level of", "are: 'Light', 'Armored', 'Biological', 'Mechanical', 'Psionic', 'Massive', 'Structure'. \"\"\" #", "self._proto.health_max == 0: return 0 return self._proto.health / self._proto.health_max @property", "blink) or if ability is made available through research (like", "be attacked. \"\"\" return self._proto.cloak in CAN_BE_ATTACKED @property_immutable_cache def buffs(self)", "SCV that is currently building. Only works for own units.", "of cargo space the unit needs. \"\"\" return self._type_data.cargo_size @property", "float]: \"\"\" Returns direction the unit is facing as a", "until it gets new orders. :param queue: \"\"\" return self(AbilityId.HOLDPOSITION,", "None @property def armor(self) -> Union[int, float]: \"\"\" Returns the", "'massive' attribute. \"\"\" return IS_MASSIVE in self._type_data.attributes @property def is_psionic(self)", "@property def buff_duration_max(self) -> int: \"\"\" Returns the maximum amount", "-> bool: \"\"\" Checks if the unit is burrowed. \"\"\"", "building. \"\"\" return self._type_data.has_vespene @property def health(self) -> Union[int, float]:", "the unit has. Does not include shields. 
\"\"\" if self._proto.health_max", "pylon or warppism. \"\"\" return self._proto.is_powered @property def is_active(self) ->", "Returns the race of the unit \"\"\" return Race(self._type_data._proto.race) @property", "TODO BATTLECRUISER doesnt have weapons in proto?! return bool(self._weapons) or", "for own units. \"\"\" return self.is_using_ability(AbilityId.MOVE) @property_immutable_cache def is_attacking(self) ->", "connect to addon \"\"\" return self.position.offset(Point2((-2.5, 0.5))) @property_mutable_cache def passengers(self)", "= False) -> UnitCommand: \"\"\" Orders a unit to stop,", "IS_MINE, IS_ENEMY, IS_CLOAKED, IS_REVEALED, CAN_BE_ATTACKED, IS_CARRYING_MINERALS, IS_CARRYING_VESPENE, IS_CARRYING_RESOURCES, IS_ATTACKING, IS_PATROLLING,", "the unit is powered by a pylon or warppism. \"\"\"", "the bot has vision of the position of the unit!", "is_powered(self) -> bool: \"\"\" Checks if the unit is powered", "is_patrolling(self) -> bool: \"\"\" Checks if a unit is patrolling.", "upgrades. \"\"\" if self.type_id == UNIT_ORACLE: return 4 if self.type_id", "def move(self, position: Union[Point2, Point3], queue: bool = False) ->", "\"\"\" return self._proto.alliance == IS_ENEMY @property def owner_id(self) -> int:", "Returns direction the unit is facing as a float in", "BELOW THIS COMMENT ARE NOT POPULATED FOR ENEMIES @property_mutable_cache def", "-> bool: \"\"\" Checks if the unit has the 'armored'", "== UNIT_BATTLECRUISER: return 6 if self.can_attack_ground: weapon = next((weapon for", "bonus_distance ) return False @property def facing(self) -> Union[int, float]:", "Checks if the unit is a detector. Has to be", "Only works for own units. \"\"\" return self.is_using_ability(IS_ATTACKING) @property_immutable_cache def", "List, Optional, Set, Tuple, Union, TYPE_CHECKING from .cache import property_immutable_cache,", "unique tag of the unit. \"\"\" return self._proto.tag @property def", "This means the bot has vision of the position of", "of the unit. 
\"\"\" return Point3.from_proto(self._proto.pos) def distance_to(self, p: Union[Unit,", "\"\"\" return self._proto.mineral_contents @property def vespene_contents(self) -> int: \"\"\" Returns", "without a weapon. \"\"\" return self._proto.attack_upgrade_level @property def armor_upgrade_level(self) ->", "bool: \"\"\" Checks if a unit is returning from mineral", "race of the unit \"\"\" return Race(self._type_data._proto.race) @property def tag(self)", "need to be powered. \"\"\" return self.is_ready and (self.type_id in", "See https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void) \"\"\" return self._proto.radius @property def build_progress(self) -> Union[int,", "idle. \"\"\" return not self._proto.orders def is_using_ability(self, abilities: Union[AbilityId, Set[AbilityId]])", "the unit! It does not give any information about the", "another unit def is_facing_unit(self, other_unit: Unit, angle_error: float = 1e-3)", "\"\"\" Returns the a list of the current orders. \"\"\"", "minerals remaining in a mineral field. \"\"\" return self._proto.mineral_contents @property", "Checks if a unit is on its way to a", "its own if it is attacked, enemy unit is in", "# NOTE: Returns 0 for units without a weapon. \"\"\"", "CloakState, DisplayType, Race, TargetType, warpgate_abilities, TargetType, Target from .ids.ability_id import", "\"\"\" Checks if this unit has any units loaded. \"\"\"", "unit is a detector. Has to be completed in order", "= False) -> UnitCommand: \"\"\" Orders a unit to stop", "= False) -> UnitCommand: \"\"\" Orders unit to attack. Target", "return self._proto.energy_max @property def energy_percentage(self) -> Union[int, float]: \"\"\" Returns", "= False) -> UnitCommand: \"\"\" Orders unit to research 'upgrade'.", "queue: \"\"\" return self(AbilityId.HARVEST_RETURN, target=target, queue=queue) def move(self, position: Union[Point2,", "the unit can attack both ground and air units. 
\"\"\"", "IS_CONSTRUCTING_SCV, IS_REPAIRING, IS_DETECTOR, UNIT_PHOTONCANNON, UNIT_COLOSSUS, ) from .data import Alliance,", "3d position of the unit. \"\"\" return Point3.from_proto(self._proto.pos) def distance_to(self,", "unit has the 'biological' attribute. \"\"\" return IS_BIOLOGICAL in self._type_data.attributes", "-> bool: \"\"\" Checks if a worker or MULE is", "unit move there and attack everything on its way. :param", "\"\"\" Returns a positive int if unit has too many", "@property def health(self) -> Union[int, float]: \"\"\" Returns the health", "import Any, Dict, List, Optional, Set, Tuple, Union, TYPE_CHECKING from", "-> Union[int, float]: \"\"\" Returns the percentage of shield points", ":param queue: \"\"\" return self(AbilityId.HARVEST_GATHER, target=target, queue=queue) def return_resource(self, target:", "units. \"\"\" return self.is_using_ability(IS_GATHERING) @property_immutable_cache def is_returning(self) -> bool: \"\"\"", "the unit can attack ground units. \"\"\" if self.type_id in", "if the unit is a mineral field. \"\"\" return self._type_data.has_minerals", "state. See https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95 \"\"\" return self._proto.cloak @property def is_cloaked(self) ->", "not self.orders: return False if isinstance(abilities, AbilityId): abilities = {abilities}", "it is attacked, enemy unit is in range or other", "-1 @property def engaged_target_tag(self) -> int: # TODO What does", "True if self._weapons: return any(weapon.type in TARGET_GROUND for weapon in", "of 1 or 2 in a two player game. \"\"\"", "is cloaked. 
\"\"\" return self._proto.cloak in IS_CLOAKED @property def is_revealed(self)", "-> bool: \"\"\" Checks if the unit is currently training", "typing import Any, Dict, List, Optional, Set, Tuple, Union, TYPE_CHECKING", "import Point2, Point3 from .unit_command import UnitCommand warnings.simplefilter(\"once\") if TYPE_CHECKING:", "in {UNIT_BATTLECRUISER, UNIT_ORACLE}: return True if self._weapons: return any(weapon.type in", "is_enemy(self) -> bool: \"\"\" Checks if the unit is hostile.", "if a unit is on its way to a mineral", "def cloak(self) -> CloakState: \"\"\" Returns cloak state. See https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95", "worker is carrying a resource. \"\"\" return not IS_CARRYING_RESOURCES.isdisjoint(self.buffs) @property", "at 'position'. :param unit: :param queue: \"\"\" normal_creation_ability = self._bot_object._game_data.units[unit.value].creation_ability.id", "Returns the movement speed of the unit. Does not include", "unit is revealed or not cloaked and therefore can be", "Point3], queue: bool = False) -> UnitCommand: \"\"\" Orders unit", "do? return self._proto.engaged_target_tag # Unit functions def has_buff(self, buff: BuffId)", "therefore can be attacked. \"\"\" return self._proto.cloak in CAN_BE_ATTACKED @property_immutable_cache", "proto_data, bot_object: BotAI): \"\"\" :param proto_data: :param bot_object: \"\"\" self._proto", "Orders the unit to return resource. Does not need a", "repair_target: Unit, queue: bool = False) -> UnitCommand: \"\"\" Order", "'unit'. Usage: self.actions.append(COMMANDCENTER.train(SCV)) :param unit: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id,", "units. \"\"\" return self.is_using_ability(IS_PATROLLING) @property_immutable_cache def is_gathering(self) -> bool: \"\"\"", "unit is hostile. 
\"\"\" return self._proto.alliance == IS_ENEMY @property def", "ability_target_type = self._bot_object._game_data.abilities[ability_id.value]._proto.target # For casting abilities that target other", "\"\"\" if self.type_id == UNIT_BATTLECRUISER: return 6 if self.can_attack_air: weapon", "Union[int, float]: \"\"\" Returns the shield points the unit has.", "@property def energy_max(self) -> Union[int, float]: \"\"\" Returns the maximum", "percentage of shield points the unit has. Returns 0 for", "@property_immutable_cache def is_detector(self) -> bool: \"\"\" Checks if the unit", "bool: \"\"\" Check if the unit is using one of", "\"\"\" Returns the maximum health of the unit. Does not", "\"\"\" Checks if unit is idle. \"\"\" return not self._proto.orders", "if unit is idle. \"\"\" return not self._proto.orders def is_using_ability(self,", "at all. \"\"\" # TODO BATTLECRUISER doesnt have weapons in", "this returns [UnitTypeId.Hatchery, UnitTypeId.Lair] For SCV, this returns None \"\"\"", "self._proto.shield_max @property def energy(self) -> Union[int, float]: \"\"\" Returns the", "0 for non-protoss units. \"\"\" return self._proto.shield @property def shield_max(self)", "bool: \"\"\" Checks if the unit is flying. \"\"\" return", "is_cloaked(self) -> bool: \"\"\" Checks if the unit is cloaked.", "float]: return self._proto.radar_range @property def is_selected(self) -> bool: \"\"\" Checks", "unit is your own hallucination or detected. \"\"\" return self._proto.is_hallucination", "-> Union[int, float]: \"\"\" Returns the amount of energy the", "@property def cargo_used(self) -> Union[float, int]: \"\"\" Returns how much", "if unit has too many harvesters mining, a negative int", "unit is currently selected. \"\"\" return self._proto.is_selected @property def is_on_screen(self)", "research 'upgrade'. 
Requires UpgradeId to be passed instead of AbilityId.", "(target.is_flying or target.type_id == UNIT_COLOSSUS): unit_attack_range = self.air_range else: return", "-> bool: \"\"\" Checks if the unit is revealed. \"\"\"", "-> Union[int, float]: \"\"\" Returns the range against air units.", "self.is_using_ability(IS_GATHERING) @property_immutable_cache def is_returning(self) -> bool: \"\"\" Checks if a", "'(bonus damage, armor type)' if unit does 'bonus damage' against", "or reactor), returns the position where a terran building has", "the maximum amount of energy the unit can have. Returns", "Union[int, float]: \"\"\" Returns direction the unit is facing as", "return bool(self._proto.vespene_contents) @property def is_flying(self) -> bool: \"\"\" Checks if", "is_using_ability(self, abilities: Union[AbilityId, Set[AbilityId]]) -> bool: \"\"\" Check if the", "def is_visible(self) -> bool: \"\"\" Checks if the unit is", "is attacking. Only works for own units. \"\"\" return self.is_using_ability(IS_ATTACKING)", "lower range than this formula if self.can_attack_ground and not target.is_flying:", "returns UnitTypeId.OrbitalCommand For SCV, this returns None \"\"\" return self._type_data.unit_alias", "is the same as CommandCenter For Hive, this returns [UnitTypeId.Hatchery,", "target=position, queue=queue) def repair(self, repair_target: Unit, queue: bool = False)", "UNIT_BATTLECRUISER: return 6 if self.can_attack_ground: weapon = next((weapon for weapon", "-> Union[int, float]: \"\"\" Returns the maximum health of the", "is_ready(self) -> bool: \"\"\" Checks if the unit is completed.", "little lower range than this formula if self.can_attack_ground and not", "tag (if it is a Unit) or Point2 (if it", "attribute. \"\"\" return IS_BIOLOGICAL in self._type_data.attributes @property def is_mechanical(self) ->", "way. 
\"\"\" return self._proto.display_type == IS_SNAPSHOT @property def is_visible(self) ->", "\"\"\" return self.build_progress == 1 @property def cloak(self) -> CloakState:", "is powered by a pylon or warppism. \"\"\" return self._proto.is_powered", "is moving. Only works for own units. \"\"\" return self.is_using_ability(AbilityId.MOVE)", "from mineral field or vespene geyser to deliver resources to", "\"\"\" Returns the tag of the addon of unit. \"\"\"", "-> bool: \"\"\" Checks if the target is in range.", "does 'bonus damage' against 'armor type'. Possible armor typs are:", "is using one of the given abilities. Only works for", "\"\"\" Returns the 2d position of the unit as tuple", "return ( self._bot_object._distance_pos_to_pos(self.position_tuple, target) <= cast_range + self.radius + bonus_distance", "def hold_position(self, queue: bool = False) -> UnitCommand: \"\"\" Orders", "the race of the unit \"\"\" return Race(self._type_data._proto.race) @property def", "ability_target_type in {Target.Unit.value, Target.PointOrUnit.value} and isinstance(target, Unit): return ( self._bot_object._distance_squared_unit_to_unit(self,", "target self.progress = progress def __repr__(self) -> str: return f\"UnitOrder({self.ability},", "attack. # NOTE: Returns 0 for units without a weapon.", "return self._proto.cargo_space_max @property def cargo_left(self) -> Union[float, int]: \"\"\" Returns", "queue: bool = False) -> UnitCommand: \"\"\" Orders the unit", "the unique tag of the unit. \"\"\" return self._proto.tag @property", "number of workers currently gathering resources at a geyser or", "has an addon attached. \"\"\" return bool(self._proto.add_on_tag) @property_immutable_cache def add_on_land_position(self)", "bot_object # Used by property_immutable_cache self.cache = {} def __repr__(self)", "\"\"\" return self(AbilityId.HARVEST_RETURN, target=target, queue=queue) def move(self, position: Union[Point2, Point3],", "\"\"\" Checks if the unit has the 'biological' attribute. 
\"\"\"", "tuple without conversion to Point2. \"\"\" return self._proto.pos.x, self._proto.pos.y @property_immutable_cache", "set of current buffs the unit has. \"\"\" return {BuffId(buff_id)", "-> bool: \"\"\" Checks if the unit transforming. Only works", "unit. \"\"\" return self._proto.tag @property def is_structure(self) -> bool: \"\"\"", "int if unit has too many harvesters mining, a negative", "return self(AbilityId.STOP, queue=queue) def patrol(self, position: Union[Point2, Point3], queue: bool", "'Light', 'Armored', 'Biological', 'Mechanical', 'Psionic', 'Massive', 'Structure'. \"\"\" # TODO:", "in order to detect and Photoncannons also need to be", "self(AbilityId.SCAN_MOVE, *args, **kwargs) def hold_position(self, queue: bool = False) ->", "upgrades. \"\"\" if self.can_attack_air: weapon = next((weapon for weapon in", "\"\"\" Deprecated: This ability redirects to 'AbilityId.ATTACK' \"\"\" return self(AbilityId.SCAN_MOVE,", "the name of the unit. \"\"\" return self._type_data.name @property def", "queue: \"\"\" return self(AbilityId.HARVEST_GATHER, target=target, queue=queue) def return_resource(self, target: Unit", "IS_LIGHT, IS_ARMORED, IS_BIOLOGICAL, IS_MECHANICAL, IS_MASSIVE, IS_PSIONIC, UNIT_BATTLECRUISER, UNIT_ORACLE, TARGET_GROUND, TARGET_AIR,", "\"\"\" Returns the race of the unit \"\"\" return Race(self._type_data._proto.race)", "Does not include upgrades. \"\"\" if self.type_id == UNIT_ORACLE: return", "return self.is_using_ability(IS_ATTACKING) @property_immutable_cache def is_patrolling(self) -> bool: \"\"\" Checks if", "non-protoss units. \"\"\" return self._proto.shield_max @property def shield_percentage(self) -> Union[int,", "BATTLECRUISER doesnt have weapons in proto?! return bool(self._weapons) or self.type_id", "@property def is_powered(self) -> bool: \"\"\" Checks if the unit", "to be passed instead of AbilityId. 
:param upgrade: :param queue:", "False) -> UnitCommand: \"\"\" Orders a unit to stop, but", "= bot_object # Used by property_immutable_cache self.cache = {} def", "return 0 @property_immutable_cache def bonus_damage(self): \"\"\" Returns a tuple of", "or mining base.\"\"\" return self._proto.assigned_harvesters @property def ideal_harvesters(self) -> int:", "is only available as a snapshot for the bot. Enemy", "else: return None @property def armor(self) -> Union[int, float]: \"\"\"", "ideal_harvesters(self) -> int: \"\"\" Returns the ideal harverster count for", "self._bot_object._game_data.unit_types: self._bot_object._game_data.unit_types[unit_type] = UnitTypeId(unit_type) return self._bot_object._game_data.unit_types[unit_type] @property_immutable_cache def _type_data(self) ->", "def is_carrying_minerals(self) -> bool: \"\"\" Checks if a worker or", "-> Union[int, float]: \"\"\" Returns the health of the unit.", "is currently repairing. Only works for own units. \"\"\" return", "return self._proto.radius @property def build_progress(self) -> Union[int, float]: \"\"\" Returns", "2d position of the unit. \"\"\" return Point2.from_proto(self._proto.pos) @property_immutable_cache def", "bool: \"\"\" Checks if the unit is moving. Only works", "in TARGET_GROUND), None) if weapon: return (weapon.damage * weapon.attacks) /", "Checks if the unit can attack ground units. \"\"\" if", "int: \"\"\" Returns the upgrade level of the units attack.", "order_target(self) -> Optional[Union[int, Point2]]: \"\"\" Returns the target tag (if", "gas buildings, 2*n for n mineral patches on that base.\"\"\"", "can attack at all. \"\"\" # TODO BATTLECRUISER doesnt have", "to gather minerals or gas. 'Target' must be a mineral", "Checks if the unit is cloaked. 
\"\"\" return self._proto.cloak in", "\"\"\" return self._proto.cloak in CAN_BE_ATTACKED @property_immutable_cache def buffs(self) -> Set:", "negative int if it has too few mining.\"\"\" return self._proto.assigned_harvesters", "bool = False) -> UnitCommand: \"\"\" Orders unit to research", "current buffs the unit has. \"\"\" return {BuffId(buff_id) for buff_id", "the unit has. Returns 0 for non-protoss units. \"\"\" return", "Returns 0 for non-protoss units. \"\"\" if self._proto.shield_max == 0:", "@property_immutable_cache def is_repairing(self) -> bool: \"\"\" Checks if the unit", "return not IS_CARRYING_MINERALS.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_vespene(self) -> bool: \"\"\" Checks", "against ground units. Does not include upgrades. \"\"\" if self.can_attack_ground:", "@property def is_mechanical(self) -> bool: \"\"\" Checks if the unit", "WarpPrism. \"\"\" return {unit.tag for unit in self._proto.passengers} @property def", "return self._proto.buff_duration_max # PROPERTIES BELOW THIS COMMENT ARE NOT POPULATED", "else proto.target_unit_tag), proto.progress, ) def __init__(self, ability: AbilityData, target, progress:", "the unit.\"\"\" return self._proto.display_type == IS_VISIBLE @property def alliance(self) ->", "Returns the shield points the unit has. Returns 0 for", "def position(self) -> Point2: \"\"\" Returns the 2d position of", "False) -> UnitCommand: \"\"\" Order an SCV or MULE to", "current orders. \"\"\" return [UnitOrder.from_proto(order, self._bot_object) for order in self._proto.orders]", "warppism. \"\"\" return self._proto.is_powered @property def is_active(self) -> bool: \"\"\"", "where a terran building has to land to connect to", "for gas buildings, 2*n for n mineral patches on that", "unit is currently training or researching. 
\"\"\" return self._proto.is_active #", "return Point2.from_proto(self._proto.pos) @property_immutable_cache def position3d(self) -> Point3: \"\"\" Returns the", "def train(self, unit: UnitTypeId, queue: bool = False) -> UnitCommand:", "Target can be a Unit or Point2. Attacking a position", "Medivac, Nydus, Overlord or WarpPrism. \"\"\" return {Unit(unit, self._bot_object) for", "completed yet :param other_unit: :param angle_error: \"\"\" pass @property def", "minerals or gas. 'Target' must be a mineral patch or", "the 3d position of the unit. \"\"\" return Point3.from_proto(self._proto.pos) def", "!r}, tag={self.tag})\" @property_immutable_cache def type_id(self) -> UnitTypeId: \"\"\" UnitTypeId found", "Unit, angle_error: float = 1e-3) -> bool: \"\"\" Function not", "= weapon.damage_bonus[0] return (b.bonus, Attribute(b.attribute).name) else: return None @property def", "def is_armored(self) -> bool: \"\"\" Checks if the unit has", "\"\"\" Checks if the unit is powered by a pylon", "if unit has buff 'buff'. \"\"\" assert isinstance(buff, BuffId), f\"{buff}", "return self._type_data.name @property def race(self) -> Race: \"\"\" Returns the", "attack at all. Does not include upgrades. \"\"\" if self.type_id", "\"\"\" return self._proto.cloak @property def is_cloaked(self) -> bool: \"\"\" Checks", "at 'position'. Usage: self.actions.append(SCV.build(COMMANDCENTER, position)) :param unit: :param position: :param", "Hive, this returns [UnitTypeId.Hatchery, UnitTypeId.Lair] For SCV, this returns None", "extraction building. 
:param target: :param queue: \"\"\" return self(AbilityId.HARVEST_GATHER, target=target,", "Union[int, float]: \"\"\" Returns the movement speed of the unit.", "\"\"\" try: return self._type_data._proto.weapons except: return None @property_immutable_cache def can_attack(self)", "like transfuse, feedback, snipe, yamato if ability_target_type in {Target.Unit.value, Target.PointOrUnit.value}", "0: self.actions.append(unit.move(closest_allied_unit_because_cant_attack)) else: self.actions.append(unit.move(retreatPosition)) \"\"\" if self.can_attack: return self._proto.weapon_cooldown return", "cloak state. See https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95 \"\"\" return self._proto.cloak @property def is_cloaked(self)", "of the current orders. \"\"\" return [UnitOrder.from_proto(order, self._bot_object) for order", "completion in range [0,1].\"\"\" return self._proto.build_progress @property def is_ready(self) ->", "@property_immutable_cache def is_collecting(self) -> bool: \"\"\" Checks if a unit", "def is_carrying_vespene(self) -> bool: \"\"\" Checks if a worker is", "unit. Does not include upgrades \"\"\" return self._type_data._proto.armor @property def", "unit is revealed. \"\"\" return self._proto.cloak is IS_REVEALED @property def", "self._proto.passengers} @property def cargo_used(self) -> Union[float, int]: \"\"\" Returns how", "\"\"\" return self._proto.cargo_space_taken @property def has_cargo(self) -> bool: \"\"\" Checks", "gas. 'Target' must be a mineral patch or a gas", "cast_range + self.radius + bonus_distance ) return False @property def", "be completed in order to detect and Photoncannons also need", "amount of remaining frames of the visible timer bar. #", "return self(self._bot_object._game_data.units[unit.value].creation_ability.id, queue=queue) def build(self, unit: UnitTypeId, position: Union[Point2, Point3]", "Checks if the unit is burrowed. 
\"\"\" return self._proto.is_burrowed @property", "addon of unit. \"\"\" return self._proto.add_on_tag @property def has_add_on(self) ->", "AbilityId): abilities = {abilities} return self.orders[0].ability.id in abilities @property_immutable_cache def", "str: \"\"\" Returns string of this form: Unit(name='SCV', tag=4396941328). \"\"\"", "of x axis.\"\"\" return self._proto.facing # TODO: a function that", "is controlled by the bot. \"\"\" return self._proto.alliance == IS_MINE", "@property def is_armored(self) -> bool: \"\"\" Checks if the unit", "range than this formula if self.can_attack_ground and not target.is_flying: unit_attack_range", "\"\"\" Returns the maximum amount of energy the unit can", "return self._proto.health / self._proto.health_max @property def shield(self) -> Union[int, float]:", "of the units armor. \"\"\" return self._proto.armor_upgrade_level @property def shield_upgrade_level(self)", "gather minerals or gas. 'Target' must be a mineral patch", "checking ability cooldown (like stalker blink) or if ability is", "0: return 0 return self._proto.energy / self._proto.energy_max @property def is_snapshot(self)", "return self._proto.cloak @property def is_cloaked(self) -> bool: \"\"\" Checks if", "def unit_alias(self) -> Optional[UnitTypeId]: \"\"\" Building type equality, e.g. FlyingOrbitalCommand", "if isinstance(abilities, AbilityId): abilities = {abilities} return self.orders[0].ability.id in abilities", "def _type_data(self) -> \"UnitTypeData\": \"\"\" Provides the unit type data.", "float]: \"\"\" Returns the maximum shield points the unit can", "-> bool: \"\"\" Checks if the unit is attacking. Only", "@property def is_structure(self) -> bool: \"\"\" Checks if the unit", "\"\"\" if self.type_id == UNIT_ORACLE: return 4 if self.type_id ==", "upgrade level of the units shield. 
# NOTE: Returns 0", "- self._proto.cargo_space_taken @property def assigned_harvesters(self) -> int: \"\"\" Returns the", "-> Union[int, float]: \"\"\" Returns the maximum shield points the", "if this unit is facing another unit def is_facing_unit(self, other_unit:", "bool = False) -> UnitCommand: \"\"\" Orders the unit to", "@property_immutable_cache def can_attack_ground(self) -> bool: \"\"\" Checks if the unit", "def repair(self, repair_target: Unit, queue: bool = False) -> UnitCommand:", "0.5 return self._bot_object.distance_math_hypot(self.position_tuple, p) def target_in_range(self, target: Unit, bonus_distance: Union[int,", "UnitCommand: \"\"\" Orders a unit to stop, but can start", "proto, bot_object: BotAI): return cls( bot_object._game_data.abilities[proto.ability_id], (proto.target_world_space_pos if proto.HasField(\"target_world_space_pos\") else", "-> bool: \"\"\" Function not completed yet :param other_unit: :param", "return IS_PSIONIC in self._type_data.attributes @property def tech_alias(self) -> Optional[List[UnitTypeId]]: \"\"\"", "IS_CLOAKED @property def is_revealed(self) -> bool: \"\"\" Checks if the", "self.type_id in transforming and self.is_using_ability(transforming[self.type_id]) @property_immutable_cache def is_repairing(self) -> bool:", "return 6 if self.can_attack_ground: weapon = next((weapon for weapon in", "-> bool: \"\"\" Checks if the unit is powered by", "unit. Note that some units take up more than one", "self.is_using_ability(IS_RETURNING) @property_immutable_cache def is_collecting(self) -> bool: \"\"\" Checks if a", "float = None): \"\"\" :param ability: :param target: :param progress:", "in range [0,1].\"\"\" return self._proto.build_progress @property def is_ready(self) -> bool:", "self._weapons: if weapon.damage_bonus: b = weapon.damage_bonus[0] return (b.bonus, Attribute(b.attribute).name) else:", "\"\"\" Orders unit to research 'upgrade'. 
Requires UpgradeId to be", "-> int: \"\"\" Returns a positive int if unit has", "a worker or MULE is carrying (gold-)minerals. \"\"\" return not", "-> UnitCommand: \"\"\" Orders unit to train another 'unit'. Usage:", "bool: \"\"\" Test if a unit is able to cast", "units. Does not include upgrades. \"\"\" if self.type_id == UNIT_ORACLE:", "be a Unit (to follow that unit) or Point2. :param", "Returns the percentage of health the unit has. Does not", "self._proto.shield / self._proto.shield_max @property def energy(self) -> Union[int, float]: \"\"\"", "unit_type = self._proto.unit_type if unit_type not in self._bot_object._game_data.unit_types: self._bot_object._game_data.unit_types[unit_type] =", "def bonus_damage(self): \"\"\" Returns a tuple of form '(bonus damage,", "or buffs. \"\"\" return self._type_data._proto.movement_speed @property def is_mineral_field(self) -> bool:", "a 'target'. :param target: :param queue: \"\"\" return self(AbilityId.HARVEST_RETURN, target=target,", "can have. Returns 0 for non-protoss units. \"\"\" return self._proto.shield_max", "-> Union[int, float]: \"\"\" Returns the maximum amount of energy", "== UNIT_COLOSSUS): unit_attack_range = self.air_range else: return False return (", "if the unit is revealed or not cloaked and therefore", "Checks if the unit is hostile. \"\"\" return self._proto.alliance ==", "is the same as OrbitalCommand For flying OrbitalCommand, this returns", "== UNIT_BATTLECRUISER: return 6 if self.can_attack_air: weapon = next((weapon for", "@property def ideal_harvesters(self) -> int: \"\"\" Returns the ideal harverster", "from .ids.buff_id import BuffId from .ids.upgrade_id import UpgradeId from .ids.unit_typeid", "\"\"\" UnitTypeId found in sc2/ids/unit_typeid. Caches all type_ids of the", "own units. \"\"\" return self.is_using_ability(IS_REPAIRING) @property def add_on_tag(self) -> int:", "Checks if the unit is currently selected. \"\"\" return self._proto.is_selected", "and air units. 
\"\"\" if self.type_id == UNIT_BATTLECRUISER: return True", "is made available through research (like HT storm). :param ability_id:", "self, ability_id: AbilityId, target: Union[Unit, Point2], bonus_distance: float = 0", "in the unit. Note that some units take up more", "def is_selected(self) -> bool: \"\"\" Checks if the unit is", ":param target: :param queue: \"\"\" return self(AbilityId.HARVEST_GATHER, target=target, queue=queue) def", "float]: \"\"\" Returns the percentage of shield points the unit", "__eq__(self, other): try: return self.tag == other.tag except: return False", "means the bot has vision of the position of the", "-> bool: \"\"\" Checks if a geyser has any gas", "TODO What does this do? return self._proto.engaged_target_tag # Unit functions", "TARGET_AIR, TARGET_BOTH, IS_SNAPSHOT, IS_VISIBLE, IS_MINE, IS_ENEMY, IS_CLOAKED, IS_REVEALED, CAN_BE_ATTACKED, IS_CARRYING_MINERALS,", "engaged_target_tag(self) -> int: # TODO What does this do? return", "tag(self) -> int: \"\"\" Returns the unique tag of the", "transforming, IS_STRUCTURE, IS_LIGHT, IS_ARMORED, IS_BIOLOGICAL, IS_MECHANICAL, IS_MASSIVE, IS_PSIONIC, UNIT_BATTLECRUISER, UNIT_ORACLE,", "in self._type_data.attributes @property def tech_alias(self) -> Optional[List[UnitTypeId]]: \"\"\" Building tech", "against air units. Does not include upgrades. \"\"\" if self.type_id", "attacked. \"\"\" return self._proto.cloak in CAN_BE_ATTACKED @property_immutable_cache def buffs(self) ->", "buffs. \"\"\" return self._type_data._proto.movement_speed @property def is_mineral_field(self) -> bool: \"\"\"", "= False) -> UnitCommand: \"\"\" Orders a unit to patrol", "build extractors on empty geysers. \"\"\" return bool(self._proto.vespene_contents) @property def", "float]: \"\"\" Returns completion in range [0,1].\"\"\" return self._proto.build_progress @property", "is currently used in the unit. Note that some units", "player game. 
\"\"\" return self._proto.owner @property def position_tuple(self) -> Tuple[float,", "Position) from the first order, returns None if the unit", "can have. Returns 0 for units without energy. \"\"\" return", "MULE that is currently repairing. Only works for own units.", "Unit functions def has_buff(self, buff: BuffId) -> bool: \"\"\" Checks", "\"\"\" Returns string of this form: Unit(name='SCV', tag=4396941328). \"\"\" return", "WarpPrism. \"\"\" return {Unit(unit, self._bot_object) for unit in self._proto.passengers} @property_mutable_cache", "-> Union[int, float]: \"\"\" Returns the time until the unit", "bool(self._proto.cargo_space_taken) @property def cargo_size(self) -> Union[float, int]: \"\"\" Returns the", "ability is made available through research (like HT storm). :param", "(b.bonus, Attribute(b.attribute).name) else: return None @property def armor(self) -> Union[int,", "self.ability = ability self.target = target self.progress = progress def", "To calculate the 3d distance, use unit.position3d.distance_to(p) :param p: \"\"\"", "few mining.\"\"\" return self._proto.assigned_harvesters - self._proto.ideal_harvesters @property_immutable_cache def weapon_cooldown(self) ->", "weapon in self._weapons) return False @property_immutable_cache def can_attack_ground(self) -> bool:", "'Psionic', 'Massive', 'Structure'. \"\"\" # TODO: Consider units with ability", "0: return 0 return self._proto.health / self._proto.health_max @property def shield(self)", "int: \"\"\" Returns the upgrade level of the units shield.", "if self.orders: if isinstance(self.orders[0].target, int): return self.orders[0].target else: return Point2.from_proto(self.orders[0].target)", ":param bonus_distance: \"\"\" # TODO: Fix this because immovable units", "cargo_left(self) -> Union[float, int]: \"\"\" Returns how much cargo space", "unit to build another 'unit' at 'position'. 
Usage: self.actions.append(SCV.build(COMMANDCENTER, position))", "Union[Unit, Point2, Point3]) -> Union[int, float]: \"\"\" Using the 2d", "the unit. Does not include upgrades or buffs. \"\"\" return", "def energy_percentage(self) -> Union[int, float]: \"\"\" Returns the percentage of", "own units. \"\"\" return self.is_using_ability(AbilityId.MOVE) @property_immutable_cache def is_attacking(self) -> bool:", "\"\"\" return self(AbilityId.ATTACK, target=target, queue=queue) def gather(self, target: Unit, queue:", "include upgrades or buffs. \"\"\" return self._type_data._proto.movement_speed @property def is_mineral_field(self)", "@property_mutable_cache def orders(self) -> List[UnitOrder]: \"\"\" Returns the a list", "attack. Target can be a Unit or Point2. Attacking a", "({ability_id}) that has no cast range\" ability_target_type = self._bot_object._game_data.abilities[ability_id.value]._proto.target #", "or vespene geyser to deliver resources to townhall. Only works", "self(AbilityId.HOLDPOSITION, queue=queue) def stop(self, queue: bool = False) -> UnitCommand:", "tag={self.tag})\" @property_immutable_cache def type_id(self) -> UnitTypeId: \"\"\" UnitTypeId found in", "\"\"\" Returns the range against ground units. Does not include", "@property def health_max(self) -> Union[int, float]: \"\"\" Returns the maximum", ":param unit: :param position: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, target=position,", "this formula if self.can_attack_ground and not target.is_flying: unit_attack_range = self.ground_range", "self._proto.is_blip @property def is_powered(self) -> bool: \"\"\" Checks if the", "move to 'position'. Target can be a Unit (to follow", "currently building. Only works for own units. 
\"\"\" return self.is_using_ability(IS_CONSTRUCTING_SCV)", "no BuffId\" return buff in self.buffs def train(self, unit: UnitTypeId,", "no cast range\" ability_target_type = self._bot_object._game_data.abilities[ability_id.value]._proto.target # For casting abilities", "'biological' attribute. \"\"\" return IS_BIOLOGICAL in self._type_data.attributes @property def is_mechanical(self)", "cargo space is available at maximum. \"\"\" return self._proto.cargo_space_max @property", "UnitTypeId, position: Union[Point2, Point3] = None, queue: bool = False)", "= self._proto.unit_type if unit_type not in self._bot_object._game_data.unit_types: self._bot_object._game_data.unit_types[unit_type] = UnitTypeId(unit_type)", "the same as CommandCenter For Hive, this returns [UnitTypeId.Hatchery, UnitTypeId.Lair]", "Checks if the unit is a non-empty vespene geyser or", "@property def can_be_attacked(self) -> bool: \"\"\" Checks if the unit", "\"\"\" Function not completed yet :param other_unit: :param angle_error: \"\"\"", "e.g. FlyingOrbitalCommand is the same as OrbitalCommand For flying OrbitalCommand,", "return self._proto.cloak in IS_CLOAKED @property def is_revealed(self) -> bool: \"\"\"", "-> bool: \"\"\" Checks if the unit has the 'light'", "this returns None \"\"\" return self._type_data.unit_alias @property_immutable_cache def _weapons(self): \"\"\"", "\"\"\" Checks if the unit is revealed. \"\"\" return self._proto.cloak", "0 for units without a shield. \"\"\" return self._proto.shield_upgrade_level @property", "def __hash__(self): return self.tag def __eq__(self, other): try: return self.tag", ":param queue: \"\"\" return self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id, queue=queue) def warp_in(self, unit: UnitTypeId,", "the unit type data. 
\"\"\" return self._bot_object._game_data.units[self._proto.unit_type] @property def name(self)", "ground, like queen creep tumor, ravager bile, HT storm if", "that target other units, like transfuse, feedback, snipe, yamato if", "[UnitOrder.from_proto(order, self._bot_object) for order in self._proto.orders] @property_immutable_cache def order_target(self) ->", "BotAI): return cls( bot_object._game_data.abilities[proto.ability_id], (proto.target_world_space_pos if proto.HasField(\"target_world_space_pos\") else proto.target_unit_tag), proto.progress,", "if ability_target_type in {Target.Unit.value, Target.PointOrUnit.value} and isinstance(target, Unit): return (", "or MULE is carrying (gold-)minerals. \"\"\" return not IS_CARRYING_MINERALS.isdisjoint(self.buffs) @property_immutable_cache", "Point2, Point3], queue: bool = False) -> UnitCommand: \"\"\" Orders", "command starts and the target position. Can be queued up", "\"\"\" return not self._proto.orders def is_using_ability(self, abilities: Union[AbilityId, Set[AbilityId]]) ->", "assert isinstance(buff, BuffId), f\"{buff} is no BuffId\" return buff in", "def __eq__(self, other): try: return self.tag == other.tag except: return", "direction the unit is facing as a float in range", "\"\"\" return self._proto.is_hallucination @property def attack_upgrade_level(self) -> int: \"\"\" Returns", "the unit is hostile. \"\"\" return self._proto.alliance == IS_ENEMY @property", "bool: \"\"\" Checks if unit has buff 'buff'. \"\"\" assert", "buffs(self) -> Set: \"\"\" Returns the set of current buffs", "is carrying a resource. \"\"\" return not IS_CARRYING_RESOURCES.isdisjoint(self.buffs) @property def", "-> bool: \"\"\" Checks if the unit is a non-empty", "starts and the target position. Can be queued up to", "of this form: Unit(name='SCV', tag=4396941328). 
\"\"\" return f\"Unit(name={self.name !r}, tag={self.tag})\"", "\"\"\" return self._proto.cloak in IS_CLOAKED @property def is_revealed(self) -> bool:", "\"\"\" Returns the 3d position of the unit. \"\"\" return", "is_carrying_resource(self) -> bool: \"\"\" Checks if a worker is carrying", "units. \"\"\" return self._proto.shield @property def shield_max(self) -> Union[int, float]:", "friendly units need the space. :param queue: \"\"\" return self(AbilityId.STOP,", "OrbitalCommand is the same as CommandCenter For Hive, this returns", "_type_data(self) -> \"UnitTypeData\": \"\"\" Provides the unit type data. \"\"\"", "n mineral patches on that base.\"\"\" return self._proto.ideal_harvesters @property def", "returns -1 for units that can't attack. Usage: if unit.weapon_cooldown", "\"\"\" if self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE}: return True if self._weapons:", "unit. Does not include shields. \"\"\" return self._proto.health @property def", "position of the unit! It does not give any information", "for units without a shield. \"\"\" return self._proto.shield_upgrade_level @property def", "# TODO: Fix this because immovable units (sieged tank, planetary", "return self.type_id in transforming and self.is_using_ability(transforming[self.type_id]) @property_immutable_cache def is_repairing(self) ->", "+ bonus_distance ) return False @property def facing(self) -> Union[int,", "available at maximum. \"\"\" return self._proto.cargo_space_max @property def cargo_left(self) ->", "annotations import warnings from typing import Any, Dict, List, Optional,", "def in_ability_cast_range( self, ability_id: AbilityId, target: Union[Unit, Point2], bonus_distance: float", "or WarpPrism. 
\"\"\" return {Unit(unit, self._bot_object) for unit in self._proto.passengers}", "\"\"\" return {Unit(unit, self._bot_object) for unit in self._proto.passengers} @property_mutable_cache def", "return self._type_data.cargo_size @property def cargo_max(self) -> Union[float, int]: \"\"\" How", "UnitCommand: \"\"\" Orders a unit to patrol between position it", "\"\"\" Checks if the unit is only available as a", "*args, **kwargs) -> UnitCommand: \"\"\" Deprecated: This ability redirects to", "self._proto.alliance == IS_MINE @property def is_enemy(self) -> bool: \"\"\" Checks", "is on the screen. \"\"\" return self._proto.is_on_screen @property def is_blip(self)", "\"\"\" return self._proto.is_selected @property def is_on_screen(self) -> bool: \"\"\" Checks", "that is currently building. Only works for own units. \"\"\"", "again, returns -1 for units that can't attack. Usage: if", "higher, not visible ground appear this way. \"\"\" return self._proto.display_type", "detect and Photoncannons also need to be powered. \"\"\" return", "Checks if the unit is on the screen. \"\"\" return", "from .game_data import AbilityData class UnitOrder: @classmethod def from_proto(cls, proto,", "return Point2.from_proto(self.orders[0].target) return None @property def noqueue(self) -> bool: \"\"\"", "return self._proto.build_progress @property def is_ready(self) -> bool: \"\"\" Checks if", "in range. Includes the target's radius when calculating distance to", "\"\"\" if self.can_attack: return self._proto.weapon_cooldown return -1 @property def engaged_target_tag(self)", "SCV, this returns None \"\"\" return self._type_data.tech_alias @property def unit_alias(self)", "TARGET_AIR), None) if weapon: return weapon.range return 0 @property_immutable_cache def", "if the unit is your own hallucination or detected. \"\"\"", "angle_error: \"\"\" pass @property def radius(self) -> Union[int, float]: \"\"\"", "unit is burrowed. 
\"\"\" return self._proto.is_burrowed @property def is_hallucination(self) ->", "return self._proto.is_on_screen @property def is_blip(self) -> bool: \"\"\" Checks if", "unit size. See https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void) \"\"\" return self._proto.radius @property def build_progress(self)", "-> int: \"\"\" Returns the number of workers currently gathering", "is completed. \"\"\" return self.build_progress == 1 @property def cloak(self)", "units. \"\"\" return self.is_using_ability(IS_REPAIRING) @property def add_on_tag(self) -> int: \"\"\"", "Point2.from_proto(self._proto.pos) @property_immutable_cache def position3d(self) -> Point3: \"\"\" Returns the 3d", "or attacking enemy units on higher, not visible ground appear", "-> UnitTypeId: \"\"\" UnitTypeId found in sc2/ids/unit_typeid. Caches all type_ids", "\"\"\" Returns the sight range of the unit. \"\"\" return", "AbilityId, target: Union[Unit, Point2], bonus_distance: float = 0 ) ->", "be removed soon, please use is_idle instead\", DeprecationWarning, stacklevel=2) return", "Alliance, Attribute, CloakState, DisplayType, Race, TargetType, warpgate_abilities, TargetType, Target from", "\"\"\" Checks if the unit is idle. \"\"\" warnings.warn(\"noqueue will", "attribute. \"\"\" return IS_ARMORED in self._type_data.attributes @property def is_biological(self) ->", "and attack everything on its way. :param target: :param queue:", "IS_STRUCTURE in self._type_data.attributes @property def is_light(self) -> bool: \"\"\" Checks", "def is_light(self) -> bool: \"\"\" Checks if the unit has", ".ids.ability_id import AbilityId from .ids.buff_id import BuffId from .ids.upgrade_id import", "return self._proto.tag @property def is_structure(self) -> bool: \"\"\" Checks if", "Checks if the unit has the 'biological' attribute. \"\"\" return", "include upgrades. 
\"\"\" if self.can_attack_air: weapon = next((weapon for weapon", "UnitTypeId.OrbitalCommand For SCV, this returns None \"\"\" return self._type_data.unit_alias @property_immutable_cache", "\"\"\" Returns the team the unit belongs to. \"\"\" return", "unit is detected by a sensor tower. \"\"\" return self._proto.is_blip", "unit can attack both ground and air units. \"\"\" if", "unit has. \"\"\" return {BuffId(buff_id) for buff_id in self._proto.buff_ids} @property_immutable_cache", "\"\"\" Checks if the unit is detected by a sensor", "import BotAI from .game_data import AbilityData class UnitOrder: @classmethod def", "be passed instead of AbilityId. :param upgrade: :param queue: \"\"\"", "in self._type_data.attributes @property def is_armored(self) -> bool: \"\"\" Checks if", "Returns the team the unit belongs to. \"\"\" return self._proto.alliance", "return self._proto.is_hallucination @property def attack_upgrade_level(self) -> int: \"\"\" Returns the", "self._proto.engaged_target_tag # Unit functions def has_buff(self, buff: BuffId) -> bool:", "unit has the 'mechanical' attribute. \"\"\" return IS_MECHANICAL in self._type_data.attributes", "\"\"\" return IS_PSIONIC in self._type_data.attributes @property def tech_alias(self) -> Optional[List[UnitTypeId]]:", "Returns the amount of remaining frames of the visible timer", "\"\"\" return self.position.offset(Point2((-2.5, 0.5))) @property_mutable_cache def passengers(self) -> Set[Unit]: \"\"\"", "detector. 
Has to be completed in order to detect and", "# PROPERTIES BELOW THIS COMMENT ARE NOT POPULATED FOR ENEMIES", "self.actions.append(COMMANDCENTER.train(SCV)) :param unit: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, queue=queue) def", "-> Race: \"\"\" Returns the race of the unit \"\"\"", "\"\"\" return self._type_data.has_minerals @property def is_vespene_geyser(self) -> bool: \"\"\" Checks", "unit is in range or other friendly units need the", "works for own units. \"\"\" return self.is_using_ability(IS_RETURNING) @property_immutable_cache def is_collecting(self)", "the unit to move to 'position'. Target can be a", "found in sc2/ids/unit_typeid. Caches all type_ids of the same unit", "IS_BIOLOGICAL in self._type_data.attributes @property def is_mechanical(self) -> bool: \"\"\" Checks", "training or researching. \"\"\" return self._proto.is_active # PROPERTIES BELOW THIS", "position of the unit. \"\"\" return Point3.from_proto(self._proto.pos) def distance_to(self, p:", "\"\"\" Checks if the unit is a non-empty vespene geyser", "\"\"\" Checks if the unit is a mineral field. \"\"\"", ":param queue: \"\"\" normal_creation_ability = self._bot_object._game_data.units[unit.value].creation_ability.id return self(warpgate_abilities[normal_creation_ability], target=position) def", "the amount of minerals remaining in a mineral field. \"\"\"", "air_range(self) -> Union[int, float]: \"\"\" Returns the range against air", "Checks if the unit is completed. \"\"\" return self.build_progress ==", "return self._proto.energy / self._proto.energy_max @property def is_snapshot(self) -> bool: \"\"\"", "TODO: a function that checks if this unit is facing", "Checks if the unit is only available as a snapshot", "\"\"\" return self.is_using_ability(IS_COLLECTING) @property_immutable_cache def is_constructing_scv(self) -> bool: \"\"\" Checks", "can air attack at all. Does not include upgrades. 
\"\"\"", "self._type_data._proto.weapons except: return None @property_immutable_cache def can_attack(self) -> bool: \"\"\"", "warnings.simplefilter(\"once\") if TYPE_CHECKING: from .bot_ai import BotAI from .game_data import", "THIS COMMENT ARE NOT POPULATED FOR SNAPSHOTS @property def mineral_contents(self)", "mineral field or vespene geyser to mine. Only works for", "include upgrades. \"\"\" if self.can_attack_ground: weapon = next((weapon for weapon", "or multiple attacks (Thor). if self._weapons: for weapon in self._weapons:", "not include shields. \"\"\" if self._proto.health_max == 0: return 0", ") def in_ability_cast_range( self, ability_id: AbilityId, target: Union[Unit, Point2], bonus_distance:", "remaining frames of the visible timer bar. # NOTE: Returns", "@property_immutable_cache def is_carrying_resource(self) -> bool: \"\"\" Checks if a worker", "return self.is_using_ability(IS_RETURNING) @property_immutable_cache def is_collecting(self) -> bool: \"\"\" Checks if", "if the unit is attacking. Only works for own units.", "return any(weapon.type in TARGET_AIR for weapon in self._weapons) return False", "'target'. :param target: :param queue: \"\"\" return self(AbilityId.HARVEST_RETURN, target=target, queue=queue)", "ability ({ability_id}) that has no cast range\" ability_target_type = self._bot_object._game_data.abilities[ability_id.value]._proto.target", "geyser has any gas remaining. You can't build extractors on", "a terran building has to land to connect to addon", "self.build_progress == 1 @property def cloak(self) -> CloakState: \"\"\" Returns", "and Photoncannons also need to be powered. 
\"\"\" return self.is_ready", "a unit is returning from mineral field or vespene geyser", "to addon \"\"\" return self.position.offset(Point2((-2.5, 0.5))) @property_mutable_cache def passengers(self) ->", "return self._proto.assigned_harvesters @property def ideal_harvesters(self) -> int: \"\"\" Returns the", ":param queue: \"\"\" return self(AbilityId.MOVE_MOVE, target=position, queue=queue) def scan_move(self, *args,", "when the command starts and the target position. Can be", "https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95 \"\"\" return self._proto.cloak @property def is_cloaked(self) -> bool: \"\"\"", "idle \"\"\" if self.orders: if isinstance(self.orders[0].target, int): return self.orders[0].target else:", "ability, target=None, queue: bool = False): return UnitCommand(ability, self, target=target,", "can_attack(self) -> bool: \"\"\" Checks if the unit can attack", "if proto.HasField(\"target_world_space_pos\") else proto.target_unit_tag), proto.progress, ) def __init__(self, ability: AbilityData,", "weapons in proto?! return bool(self._weapons) or self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE}", "-> int: \"\"\" Returns the tag of the addon of", "shield_percentage(self) -> Union[int, float]: \"\"\" Returns the percentage of shield", "self._type_data.attributes @property def is_light(self) -> bool: \"\"\" Checks if the", "Union[int, float] = 0) -> bool: \"\"\" Checks if the", "\"\"\" Checks if a unit is on its way to", "self._proto.detect_range @property_immutable_cache def is_detector(self) -> bool: \"\"\" Checks if the", "maximum shield points the unit can have. Returns 0 for", "unit has the 'armored' attribute. 
\"\"\" return IS_ARMORED in self._type_data.attributes", "{UNIT_BATTLECRUISER, UNIT_ORACLE} @property_immutable_cache def can_attack_both(self) -> bool: \"\"\" Checks if", "is_on_screen(self) -> bool: \"\"\" Checks if the unit is on", "\"\"\" return self._proto.is_burrowed @property def is_hallucination(self) -> bool: \"\"\" Returns", "'unit' at 'position'. Usage: self.actions.append(SCV.build(COMMANDCENTER, position)) :param unit: :param position:", "bool: \"\"\" Checks if the unit can air attack at", "self._proto.owner @property def position_tuple(self) -> Tuple[float, float]: \"\"\" Returns the", "size. See https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void) \"\"\" return self._proto.radius @property def build_progress(self) ->", "is_idle(self) -> bool: \"\"\" Checks if unit is idle. \"\"\"", ":param target: :param queue: \"\"\" return self(AbilityId.ATTACK, target=target, queue=queue) def", "to patrol between position it has when the command starts", "type)' if unit does 'bonus damage' against 'armor type'. Possible", "position(self) -> Point2: \"\"\" Returns the 2d position of the", "orders. :param queue: \"\"\" return self(AbilityId.HOLDPOSITION, queue=queue) def stop(self, queue:", "return self.is_ready and (self.type_id in IS_DETECTOR or self.type_id == UNIT_PHOTONCANNON", "UpgradeId to be passed instead of AbilityId. :param upgrade: :param", "self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range assert cast_range > 0, f\"Checking for an ability ({ability_id})", "ability self.target = target self.progress = progress def __repr__(self) ->", "upgrade level of the units attack. # NOTE: Returns 0", "else: self.actions.append(unit.move(retreatPosition)) \"\"\" if self.can_attack: return self._proto.weapon_cooldown return -1 @property", "def radius(self) -> Union[int, float]: \"\"\" Half of unit size.", "UnitCommand: \"\"\" Orders unit to attack. Target can be a", "of health the unit has. Does not include shields. 
\"\"\"", "(cast_range + self.radius + target.radius + bonus_distance) ** 2 )", "\"\"\" Checks if the unit is an SCV or MULE", "unit: :param position: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, target=position, queue=queue)", "a detector. Has to be completed in order to detect", "function that checks if this unit is facing another unit", "'Structure'. \"\"\" # TODO: Consider units with ability attacks (Oracle,", "return self._proto.shield @property def shield_max(self) -> Union[int, float]: \"\"\" Returns", "has to land to connect to addon \"\"\" return self.position.offset(Point2((-2.5,", "Set[int]: \"\"\" Returns the tags of the units inside a", "other.tag except: return False def __call__(self, ability, target=None, queue: bool", "bot_object: BotAI): return cls( bot_object._game_data.abilities[proto.ability_id], (proto.target_world_space_pos if proto.HasField(\"target_world_space_pos\") else proto.target_unit_tag),", "False @property_immutable_cache def can_attack_ground(self) -> bool: \"\"\" Checks if the", "Returns 0 for non-protoss units. \"\"\" return self._proto.shield @property def", "Returns completion in range [0,1].\"\"\" return self._proto.build_progress @property def is_ready(self)", "(Oracle, Baneling) or multiple attacks (Thor). if self._weapons: for weapon", "and are in the fog of war or attacking enemy", "TARGET_GROUND, TARGET_AIR, TARGET_BOTH, IS_SNAPSHOT, IS_VISIBLE, IS_MINE, IS_ENEMY, IS_CLOAKED, IS_REVEALED, CAN_BE_ATTACKED,", "is_carrying_minerals(self) -> bool: \"\"\" Checks if a worker or MULE", "List[UnitOrder]: \"\"\" Returns the a list of the current orders.", "in self._type_data.attributes @property def is_massive(self) -> bool: \"\"\" Checks if", "by a pylon or warppism. \"\"\" return self._proto.is_powered @property def", "the owner of the unit. 
This is a value of", "self._proto.display_type == IS_VISIBLE @property def alliance(self) -> Alliance: \"\"\" Returns", "\"\"\" Orders the unit to move to 'position'. Target can", "FOR ENEMIES @property_mutable_cache def orders(self) -> List[UnitOrder]: \"\"\" Returns the", "self(AbilityId.MOVE_MOVE, target=position, queue=queue) def scan_move(self, *args, **kwargs) -> UnitCommand: \"\"\"", "be queued up to seven patrol points. If the last", "-> Union[int, float]: \"\"\" Returns the dps against ground units.", "a geyser has any gas remaining. You can't build extractors", "the units shield. # NOTE: Returns 0 for units without", "\"\"\" return self._proto.buff_duration_max # PROPERTIES BELOW THIS COMMENT ARE NOT", "def is_returning(self) -> bool: \"\"\" Checks if a unit is", "armor typs are: 'Light', 'Armored', 'Biological', 'Mechanical', 'Psionic', 'Massive', 'Structure'.", "bonus_distance: \"\"\" # TODO: Fix this because immovable units (sieged", "float]: \"\"\" Returns the armor of the unit. 
Does not", "\"\"\" return self._proto.attack_upgrade_level @property def armor_upgrade_level(self) -> int: \"\"\" Returns", "int]: \"\"\" Returns the amount of cargo space the unit", "return self(AbilityId.HARVEST_RETURN, target=target, queue=queue) def move(self, position: Union[Point2, Point3], queue:", "energy(self) -> Union[int, float]: \"\"\" Returns the amount of energy", "is_snapshot(self) -> bool: \"\"\" Checks if the unit is only", "None, queue: bool = False) -> UnitCommand: \"\"\" Orders unit", "return self._proto.display_type == IS_VISIBLE @property def alliance(self) -> Alliance: \"\"\"", "TARGET_AIR for weapon in self._weapons) return False @property_immutable_cache def air_dps(self)", "is_facing_unit(self, other_unit: Unit, angle_error: float = 1e-3) -> bool: \"\"\"", "@property def health_percentage(self) -> Union[int, float]: \"\"\" Returns the percentage", "UnitTypeId from .position import Point2, Point3 from .unit_command import UnitCommand", "have weapons in proto?! return bool(self._weapons) or self.type_id in {UNIT_BATTLECRUISER,", "Union[int, float]: \"\"\" Returns the time until the unit can", "abilities that target other units, like transfuse, feedback, snipe, yamato", "if the unit is a detector. Has to be completed", "angle_error: float = 1e-3) -> bool: \"\"\" Function not completed", "target: :param queue: \"\"\" return self(AbilityId.ATTACK, target=target, queue=queue) def gather(self,", "resources to townhall. Only works for own units. \"\"\" return", "to cast an ability on the target without checking ability", "target, progress: float = None): \"\"\" :param ability: :param target:", "IS_BIOLOGICAL, IS_MECHANICAL, IS_MASSIVE, IS_PSIONIC, UNIT_BATTLECRUISER, UNIT_ORACLE, TARGET_GROUND, TARGET_AIR, TARGET_BOTH, IS_SNAPSHOT,", "weapon.damage_bonus[0] return (b.bonus, Attribute(b.attribute).name) else: return None @property def armor(self)", "-> bool: \"\"\" Checks if unit has buff 'buff'. \"\"\"", "(Thor). 
if self._weapons: for weapon in self._weapons: if weapon.damage_bonus: b", "and isinstance(target, Unit): return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (cast_range +", "sc2/ids/unit_typeid. Caches all type_ids of the same unit type. \"\"\"", "2d position of the unit as tuple without conversion to", "import warnings from typing import Any, Dict, List, Optional, Set,", "bool: \"\"\" Returns True if the unit is your own", "the number of workers currently gathering resources at a geyser", "in_ability_cast_range( self, ability_id: AbilityId, target: Union[Unit, Point2], bonus_distance: float =", "a non-empty vespene geyser or gas extraction building. \"\"\" return", "weapon.attacks) / weapon.speed return 0 @property_immutable_cache def air_range(self) -> Union[int,", "unit_attack_range = self.ground_range elif self.can_attack_air and (target.is_flying or target.type_id ==", "the units attack. # NOTE: Returns 0 for units without", "self.orders[0].target else: return Point2.from_proto(self.orders[0].target) return None @property def noqueue(self) ->", "{self.target}, {self.progress})\" class Unit: def __init__(self, proto_data, bot_object: BotAI): \"\"\"", "@property def is_cloaked(self) -> bool: \"\"\" Checks if the unit", "powered by a pylon or warppism. \"\"\" return self._proto.is_powered @property", "energy_max(self) -> Union[int, float]: \"\"\" Returns the maximum amount of", "unit.weapon_cooldown == 0: self.actions.append(unit.attack(target)) elif unit.weapon_cooldown < 0: self.actions.append(unit.move(closest_allied_unit_because_cant_attack)) else:", "point is the same as the starting point, the unit", "the unit is revealed or not cloaked and therefore can", "Checks if a unit is returning from mineral field or", "there and attack everything on its way. :param target: :param", "units without a weapon. 
\"\"\" return self._proto.attack_upgrade_level @property def armor_upgrade_level(self)", "return self._proto.buff_duration_remain @property def buff_duration_max(self) -> int: \"\"\" Returns the", "remaining in a geyser. \"\"\" return self._proto.vespene_contents @property def has_vespene(self)", "transforming. Only works for own units. \"\"\" return self.type_id in", "MULE is carrying (gold-)minerals. \"\"\" return not IS_CARRYING_MINERALS.isdisjoint(self.buffs) @property_immutable_cache def", "def add_on_land_position(self) -> Point2: \"\"\" If unit is addon (techlab", "not cloaked and therefore can be attacked. \"\"\" return self._proto.cloak", "return self._proto.owner @property def position_tuple(self) -> Tuple[float, float]: \"\"\" Returns", "\"\"\" Returns the target tag (if it is a Unit)", "if this unit has any units loaded. \"\"\" return bool(self._proto.cargo_space_taken)", "bool: \"\"\" Checks if the unit is on the screen.", "the unit has. Returns 0 for units without energy. \"\"\"", "queue: \"\"\" normal_creation_ability = self._bot_object._game_data.units[unit.value].creation_ability.id return self(warpgate_abilities[normal_creation_ability], target=position) def attack(self,", "units without energy. \"\"\" return self._proto.energy @property def energy_max(self) ->", "the unit is moving. Only works for own units. \"\"\"", "or not cloaked and therefore can be attacked. \"\"\" return", "doesnt have weapons in proto?! return bool(self._weapons) or self.type_id in", "Unit) or Point2 (if it is a Position) from the", "for weapon in self._weapons if weapon.type in TARGET_GROUND), None) if", "left in the unit. \"\"\" return self._proto.cargo_space_max - self._proto.cargo_space_taken @property", "level of the units attack. # NOTE: Returns 0 for", "patrol points. If the last point is the same as", "attack. 
Usage: if unit.weapon_cooldown == 0: self.actions.append(unit.attack(target)) elif unit.weapon_cooldown <", "unit is idle \"\"\" if self.orders: if isinstance(self.orders[0].target, int): return", "this unit has any units loaded. \"\"\" return bool(self._proto.cargo_space_taken) @property", "is_armored(self) -> bool: \"\"\" Checks if the unit has the", "the unit is visible for the bot. NOTE: This means", "Union[float, int]: \"\"\" Returns how much cargo space is currently", "Baneling) or multiple attacks (Thor). if self._weapons: for weapon in", "-> bool: \"\"\" Checks if a unit is gathering or", "fire again, returns -1 for units that can't attack. Usage:", "normal_creation_ability = self._bot_object._game_data.units[unit.value].creation_ability.id return self(warpgate_abilities[normal_creation_ability], target=position) def attack(self, target: Union[Unit,", "(if it is a Unit) or Point2 (if it is", "transfuse, feedback, snipe, yamato if ability_target_type in {Target.Unit.value, Target.PointOrUnit.value} and", "Only works for own units. \"\"\" return self.is_using_ability(IS_RETURNING) @property_immutable_cache def", "mining, a negative int if it has too few mining.\"\"\"", "too many harvesters mining, a negative int if it has", "geyser or gas extraction building. \"\"\" return self._type_data.has_vespene @property def", "calculate the 3d distance, use unit.position3d.distance_to(p) :param p: \"\"\" if", "must be a mineral patch or a gas extraction building.", "not self._proto.orders def is_using_ability(self, abilities: Union[AbilityId, Set[AbilityId]]) -> bool: \"\"\"", "conversion to Point2. \"\"\" return self._proto.pos.x, self._proto.pos.y @property_immutable_cache def position(self)", "of the units shield. # NOTE: Returns 0 for units", "buff 'buff'. \"\"\" assert isinstance(buff, BuffId), f\"{buff} is no BuffId\"", "level of the units armor. 
\"\"\" return self._proto.armor_upgrade_level @property def", "(weapon.damage * weapon.attacks) / weapon.speed return 0 @property_immutable_cache def air_range(self)", "from .unit_command import UnitCommand warnings.simplefilter(\"once\") if TYPE_CHECKING: from .bot_ai import", "\"\"\" Checks if the unit is visible for the bot.", "import property_immutable_cache, property_mutable_cache from .constants import ( transforming, IS_STRUCTURE, IS_LIGHT,", "IS_ARMORED in self._type_data.attributes @property def is_biological(self) -> bool: \"\"\" Checks", "Orders a unit to patrol between position it has when", ") -> bool: \"\"\" Test if a unit is able", "def attack(self, target: Union[Unit, Point2, Point3], queue: bool = False)", "return self._proto.vespene_contents @property def has_vespene(self) -> bool: \"\"\" Checks if", "has_add_on(self) -> bool: \"\"\" Checks if unit has an addon", "bool: \"\"\" Checks if a geyser has any gas remaining.", "the current orders. \"\"\" return [UnitOrder.from_proto(order, self._bot_object) for order in", "works for own units. \"\"\" return self.is_using_ability(IS_PATROLLING) @property_immutable_cache def is_gathering(self)", "unit can air attack at all. 
Does not include upgrades.", "resources at a geyser or mining base.\"\"\" return self._proto.assigned_harvesters @property", "checks if this unit is facing another unit def is_facing_unit(self,", "if self.type_id == UNIT_BATTLECRUISER: return 6 if self.can_attack_ground: weapon =", "self._bot_object) for unit in self._proto.passengers} @property_mutable_cache def passengers_tags(self) -> Set[int]:", "\"\"\" if not self.orders: return False if isinstance(abilities, AbilityId): abilities", "== UNIT_PHOTONCANNON and self.is_powered) @property def radar_range(self) -> Union[int, float]:", "import ( transforming, IS_STRUCTURE, IS_LIGHT, IS_ARMORED, IS_BIOLOGICAL, IS_MECHANICAL, IS_MASSIVE, IS_PSIONIC,", "self.orders: if isinstance(self.orders[0].target, int): return self.orders[0].target else: return Point2.from_proto(self.orders[0].target) return", "if a geyser has any gas remaining. You can't build", "Point3]) -> UnitCommand: \"\"\" Orders Warpgate to warp in 'unit'", "non-protoss units. \"\"\" return self._proto.shield @property def shield_max(self) -> Union[int,", "direction of x axis.\"\"\" return self._proto.facing # TODO: a function", "\"\"\" Checks if the unit is flying. \"\"\" return self._proto.is_flying", "timer bar. \"\"\" return self._proto.buff_duration_max # PROPERTIES BELOW THIS COMMENT", "def is_blip(self) -> bool: \"\"\" Checks if the unit is", "surplus_harvesters(self) -> int: \"\"\" Returns a positive int if unit", "= ability self.target = target self.progress = progress def __repr__(self)", "of the visible timer bar. # NOTE: Returns 0 for", "You can't build extractors on empty geysers. \"\"\" return bool(self._proto.vespene_contents)", "# For casting abilities that target other units, like transfuse,", "return False @property_immutable_cache def air_dps(self) -> Union[int, float]: \"\"\" Returns", "float]: \"\"\" Half of unit size. 
See https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void) \"\"\" return", "return IS_MASSIVE in self._type_data.attributes @property def is_psionic(self) -> bool: \"\"\"", "is in range. Includes the target's radius when calculating distance", "-> bool: \"\"\" Checks if the unit is a detector.", "passengers_tags(self) -> Set[int]: \"\"\" Returns the tags of the units", "Usage: self.actions.append(SCV.build(COMMANDCENTER, position)) :param unit: :param position: :param queue: \"\"\"", "0 @property_immutable_cache def ground_range(self) -> Union[int, float]: \"\"\" Returns the", "def vespene_contents(self) -> int: \"\"\" Returns the amount of gas", "Point3 from .unit_command import UnitCommand warnings.simplefilter(\"once\") if TYPE_CHECKING: from .bot_ai", "0: self.actions.append(unit.attack(target)) elif unit.weapon_cooldown < 0: self.actions.append(unit.move(closest_allied_unit_because_cant_attack)) else: self.actions.append(unit.move(retreatPosition)) \"\"\"", "return self._proto.alliance == IS_ENEMY @property def owner_id(self) -> int: \"\"\"", "train(self, unit: UnitTypeId, queue: bool = False) -> UnitCommand: \"\"\"", "that have been scouted and are in the fog of", "\"\"\" Checks if the unit has the 'mechanical' attribute. \"\"\"", "all. Does not include upgrades. \"\"\" if self.type_id == UNIT_BATTLECRUISER:", "def energy(self) -> Union[int, float]: \"\"\" Returns the amount of", "2 ) def in_ability_cast_range( self, ability_id: AbilityId, target: Union[Unit, Point2],", "is_gathering(self) -> bool: \"\"\" Checks if a unit is on", "in the fog of war or attacking enemy units on", "a circle. :param position: :param queue: \"\"\" return self(AbilityId.PATROL, target=position,", "order in self._proto.orders] @property_immutable_cache def order_target(self) -> Optional[Union[int, Point2]]: \"\"\"", "include shields. \"\"\" return self._proto.health @property def health_max(self) -> Union[int,", "maximum amount of frames of the visible timer bar. 
#", "position: Union[Point2, Point3]) -> UnitCommand: \"\"\" Orders Warpgate to warp", "@property_mutable_cache def passengers(self) -> Set[Unit]: \"\"\" Returns the units inside", "it has when the command starts and the target position.", "self.tag def __eq__(self, other): try: return self.tag == other.tag except:", "is a non-empty vespene geyser or gas extraction building. \"\"\"", "return self(AbilityId.SCAN_MOVE, *args, **kwargs) def hold_position(self, queue: bool = False)", "IS_ARMORED, IS_BIOLOGICAL, IS_MECHANICAL, IS_MASSIVE, IS_PSIONIC, UNIT_BATTLECRUISER, UNIT_ORACLE, TARGET_GROUND, TARGET_AIR, TARGET_BOTH,", "None) if weapon: return weapon.range return 0 @property_immutable_cache def can_attack_air(self)", "\"\"\" Checks if the unit can attack ground units. \"\"\"", "the unit. \"\"\" return self._proto.detect_range @property_immutable_cache def is_detector(self) -> bool:", "-> bool: \"\"\" Checks if the unit is on the", "to train another 'unit'. Usage: self.actions.append(COMMANDCENTER.train(SCV)) :param unit: :param queue:", "\"\"\" Orders unit to train another 'unit'. Usage: self.actions.append(COMMANDCENTER.train(SCV)) :param", "self._proto.shield @property def shield_max(self) -> Union[int, float]: \"\"\" Returns the", "Returns 0 for units without energy. \"\"\" if self._proto.energy_max ==", "return resource. Does not need a 'target'. :param target: :param", "is IS_REVEALED @property def can_be_attacked(self) -> bool: \"\"\" Checks if", "a resource. \"\"\" return not IS_CARRYING_RESOURCES.isdisjoint(self.buffs) @property def detect_range(self) ->", "progress: \"\"\" self.ability = ability self.target = target self.progress =", "\"\"\" How much cargo space is available at maximum. \"\"\"", "health_percentage(self) -> Union[int, float]: \"\"\" Returns the percentage of health", "of gas remaining in a geyser. \"\"\" return self._proto.vespene_contents @property", "move until it gets new orders. 
:param queue: \"\"\" return", "self._proto.vespene_contents @property def has_vespene(self) -> bool: \"\"\" Checks if a", "weapon.attacks) / weapon.speed return 0 @property_immutable_cache def ground_range(self) -> Union[int,", "can't attack. Usage: if unit.weapon_cooldown == 0: self.actions.append(unit.attack(target)) elif unit.weapon_cooldown", "if the unit has the 'armored' attribute. \"\"\" return IS_ARMORED", "of the unit \"\"\" return Race(self._type_data._proto.race) @property def tag(self) ->", "# TODO: Consider units with ability attacks (Oracle, Baneling) or", "unit: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, queue=queue) def build(self, unit:", "\"\"\" Returns how much cargo space is currently used in", "return self.tag def __eq__(self, other): try: return self.tag == other.tag", "_weapons(self): \"\"\" Returns the weapons of the unit. \"\"\" try:", "the bot. Enemy buildings that have been scouted and are", "same as the starting point, the unit will patrol in", "\"\"\" # TODO: Consider units with ability attacks (Oracle, Baneling)", "of unit size. See https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void) \"\"\" return self._proto.radius @property def", "team the unit belongs to. \"\"\" return self._proto.alliance @property def", "target is in range. Includes the target's radius when calculating", "not include upgrades. \"\"\" if self.can_attack_ground: weapon = next((weapon for", "range [0,1].\"\"\" return self._proto.build_progress @property def is_ready(self) -> bool: \"\"\"", "NOTE: This means the bot has vision of the position", "\"\"\" normal_creation_ability = self._bot_object._game_data.units[unit.value].creation_ability.id return self(warpgate_abilities[normal_creation_ability], target=position) def attack(self, target:", "is a detector. Has to be completed in order to", "unit is attacking. Only works for own units. 
\"\"\" return", "Set[Unit]: \"\"\" Returns the units inside a Bunker, CommandCenter, PlanetaryFortress,", "immovable units (sieged tank, planetary fortress etc.) have a little", "armor type)' if unit does 'bonus damage' against 'armor type'.", "is idle. \"\"\" return not self._proto.orders def is_using_ability(self, abilities: Union[AbilityId,", "IS_CARRYING_VESPENE, IS_CARRYING_RESOURCES, IS_ATTACKING, IS_PATROLLING, IS_GATHERING, IS_RETURNING, IS_COLLECTING, IS_CONSTRUCTING_SCV, IS_REPAIRING, IS_DETECTOR,", "Union[int, float]: \"\"\" Returns the range against air units. Does", "on its way. :param target: :param queue: \"\"\" return self(AbilityId.ATTACK,", "or gas. 'Target' must be a mineral patch or a", "the ideal harverster count for unit. 3 for gas buildings,", "is revealed. \"\"\" return self._proto.cloak is IS_REVEALED @property def can_be_attacked(self)", "'armor type'. Possible armor typs are: 'Light', 'Armored', 'Biological', 'Mechanical',", "= None): \"\"\" :param ability: :param target: :param progress: \"\"\"", "return_resource(self, target: Unit = None, queue: bool = False) ->", "on the target without checking ability cooldown (like stalker blink)", "of the units inside a Bunker, CommandCenter, PlanetaryFortress, Medivac, Nydus,", "f\"Checking for an ability ({ability_id}) that has no cast range\"", "bool: \"\"\" Checks if a unit is gathering or returning.", "is_carrying_vespene(self) -> bool: \"\"\" Checks if a worker is carrying", "IS_MECHANICAL, IS_MASSIVE, IS_PSIONIC, UNIT_BATTLECRUISER, UNIT_ORACLE, TARGET_GROUND, TARGET_AIR, TARGET_BOTH, IS_SNAPSHOT, IS_VISIBLE,", "can be a Unit or Point2. Attacking a position will", "return not IS_CARRYING_RESOURCES.isdisjoint(self.buffs) @property def detect_range(self) -> Union[int, float]: \"\"\"", "-> bool: \"\"\" Checks if the unit is a mineral", "IS_RETURNING, IS_COLLECTING, IS_CONSTRUCTING_SCV, IS_REPAIRING, IS_DETECTOR, UNIT_PHOTONCANNON, UNIT_COLOSSUS, ) from .data", "of the unit. 
\"\"\" return self._proto.detect_range @property_immutable_cache def is_detector(self) ->", "harvesters mining, a negative int if it has too few", "units need the space. :param queue: \"\"\" return self(AbilityId.STOP, queue=queue)", "Returns 0 for units without energy. \"\"\" return self._proto.energy_max @property", "target=position, queue=queue) def research(self, upgrade: UpgradeId, queue: bool = False)", "== other.tag except: return False def __call__(self, ability, target=None, queue:", "not IS_CARRYING_MINERALS.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_vespene(self) -> bool: \"\"\" Checks if", "Checks if the unit has the 'mechanical' attribute. \"\"\" return", ":param target: :param bonus_distance: \"\"\" cast_range = self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range assert cast_range", "queue=queue) def stop(self, queue: bool = False) -> UnitCommand: \"\"\"", "not include upgrades \"\"\" return self._type_data._proto.armor @property def sight_range(self) ->", "False) -> UnitCommand: \"\"\" Orders unit to train another 'unit'.", "self.is_using_ability(transforming[self.type_id]) @property_immutable_cache def is_repairing(self) -> bool: \"\"\" Checks if the", "from the first order, returns None if the unit is", "\"\"\" Returns the weapons of the unit. \"\"\" try: return", "return self.is_using_ability(IS_PATROLLING) @property_immutable_cache def is_gathering(self) -> bool: \"\"\" Checks if", "has the 'biological' attribute. \"\"\" return IS_BIOLOGICAL in self._type_data.attributes @property", "in self._weapons if weapon.type in TARGET_GROUND), None) if weapon: return", "the unit can have. Returns 0 for units without energy.", "= self.ground_range elif self.can_attack_air and (target.is_flying or target.type_id == UNIT_COLOSSUS):", "'position'. 
Usage: self.actions.append(SCV.build(COMMANDCENTER, position)) :param unit: :param position: :param queue:", "as OrbitalCommand For flying OrbitalCommand, this returns UnitTypeId.OrbitalCommand For SCV,", "self._type_data.attributes @property def is_mechanical(self) -> bool: \"\"\" Checks if the", "Returns 0 for units without energy. \"\"\" return self._proto.energy @property", "= 1e-3) -> bool: \"\"\" Function not completed yet :param", "-> bool: \"\"\" Test if a unit is able to", "-> bool: \"\"\" Checks if the unit is completed. \"\"\"", "return 0 return self._proto.shield / self._proto.shield_max @property def energy(self) ->", "None): \"\"\" :param ability: :param target: :param progress: \"\"\" self.ability", "Fix this because immovable units (sieged tank, planetary fortress etc.)", "units on higher, not visible ground appear this way. \"\"\"", "and self.is_powered) @property def radar_range(self) -> Union[int, float]: return self._proto.radar_range", "or researching. \"\"\" return self._proto.is_active # PROPERTIES BELOW THIS COMMENT", "with ability attacks (Oracle, Baneling) or multiple attacks (Thor). if", "-> bool: \"\"\" Checks if a unit is on its", "\"\"\" return bool(self._proto.cargo_space_taken) @property def cargo_size(self) -> Union[float, int]: \"\"\"", "isinstance(target, Unit): return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (cast_range + self.radius", "that base.\"\"\" return self._proto.ideal_harvesters @property def surplus_harvesters(self) -> int: \"\"\"", "the unit. Does not include upgrades \"\"\" return self._type_data._proto.armor @property", "needs. 
\"\"\" return self._type_data.cargo_size @property def cargo_max(self) -> Union[float, int]:", "Unit, queue: bool = False) -> UnitCommand: \"\"\" Order an", "as CommandCenter For Hive, this returns [UnitTypeId.Hatchery, UnitTypeId.Lair] For SCV,", "mining base.\"\"\" return self._proto.assigned_harvesters @property def ideal_harvesters(self) -> int: \"\"\"", "self._type_data._proto.armor @property def sight_range(self) -> Union[int, float]: \"\"\" Returns the", "-> str: \"\"\" Returns string of this form: Unit(name='SCV', tag=4396941328).", "0 return self._proto.health / self._proto.health_max @property def shield(self) -> Union[int,", "Returns the tags of the units inside a Bunker, CommandCenter,", "BotAI): \"\"\" :param proto_data: :param bot_object: \"\"\" self._proto = proto_data", "same as CommandCenter For Hive, this returns [UnitTypeId.Hatchery, UnitTypeId.Lair] For", "Unit): return self._bot_object._distance_squared_unit_to_unit(self, p) ** 0.5 return self._bot_object.distance_math_hypot(self.position_tuple, p) def", "type_id(self) -> UnitTypeId: \"\"\" UnitTypeId found in sc2/ids/unit_typeid. Caches all", "by the bot. \"\"\" return self._proto.alliance == IS_MINE @property def", "{Target.Unit.value, Target.PointOrUnit.value} and isinstance(target, Unit): return ( self._bot_object._distance_squared_unit_to_unit(self, target) <=", "or Point2. :param position: :param queue: \"\"\" return self(AbilityId.MOVE_MOVE, target=position,", "\"\"\" return self.is_ready and (self.type_id in IS_DETECTOR or self.type_id ==", "def is_constructing_scv(self) -> bool: \"\"\" Checks if the unit is", "buildings that have been scouted and are in the fog", "to build another 'unit' at 'position'. Usage: self.actions.append(SCV.build(COMMANDCENTER, position)) :param", "Only works for own units. \"\"\" if not self.orders: return", "TARGET_GROUND), None) if weapon: return (weapon.damage * weapon.attacks) / weapon.speed", "Building tech equality, e.g. 
OrbitalCommand is the same as CommandCenter", "@property_immutable_cache def buffs(self) -> Set: \"\"\" Returns the set of", "\"\"\" Returns the units inside a Bunker, CommandCenter, PlanetaryFortress, Medivac,", "Nydus, Overlord or WarpPrism. \"\"\" return {unit.tag for unit in", "Consider units with ability attacks (Oracle, Baneling) or multiple attacks", "for units without a timer bar. \"\"\" return self._proto.buff_duration_remain @property", "FOR SNAPSHOTS @property def mineral_contents(self) -> int: \"\"\" Returns the", "on its way to a mineral field or vespene geyser", "return self.orders[0].target else: return Point2.from_proto(self.orders[0].target) return None @property def noqueue(self)", "tuple) ): return ( self._bot_object._distance_pos_to_pos(self.position_tuple, target) <= cast_range + self.radius", "@property def is_snapshot(self) -> bool: \"\"\" Checks if the unit", "a Unit (to follow that unit) or Point2. :param position:", "@property def is_visible(self) -> bool: \"\"\" Checks if the unit", "Only works for own units. \"\"\" return self.type_id in transforming", "is available at maximum. \"\"\" return self._proto.cargo_space_max @property def cargo_left(self)", "\"\"\" # TODO BATTLECRUISER doesnt have weapons in proto?! return", "the 'light' attribute. \"\"\" return IS_LIGHT in self._type_data.attributes @property def", "the unit can fire again, returns -1 for units that", "\"\"\" return self.is_using_ability(IS_REPAIRING) @property def add_on_tag(self) -> int: \"\"\" Returns", "the upgrade level of the units shield. # NOTE: Returns", "def shield_max(self) -> Union[int, float]: \"\"\" Returns the maximum shield", "UpgradeId, queue: bool = False) -> UnitCommand: \"\"\" Orders unit", "bool: \"\"\" Checks if the unit is detected by a", "ARE NOT POPULATED FOR SNAPSHOTS @property def mineral_contents(self) -> int:", "on empty geysers. 
\"\"\" return bool(self._proto.vespene_contents) @property def is_flying(self) ->", "Point3], queue: bool = False) -> UnitCommand: \"\"\" Orders a", "Checks if the unit is visible for the bot. NOTE:", "a worker is carrying vespene gas. \"\"\" return not IS_CARRYING_VESPENE.isdisjoint(self.buffs)", "units inside a Bunker, CommandCenter, PlanetaryFortress, Medivac, Nydus, Overlord or", "unit has an addon attached. \"\"\" return bool(self._proto.add_on_tag) @property_immutable_cache def", "@property def is_revealed(self) -> bool: \"\"\" Checks if the unit", "self._proto.energy_max == 0: return 0 return self._proto.energy / self._proto.energy_max @property", "the set of current buffs the unit has. \"\"\" return", "ground units. Does not include upgrades. \"\"\" if self.type_id ==", "Returns the maximum amount of energy the unit can have.", "units. \"\"\" return self.is_using_ability(IS_COLLECTING) @property_immutable_cache def is_constructing_scv(self) -> bool: \"\"\"", "\"\"\" return [UnitOrder.from_proto(order, self._bot_object) for order in self._proto.orders] @property_immutable_cache def", "self._proto.alliance @property def is_mine(self) -> bool: \"\"\" Checks if the", "UnitCommand: \"\"\" Orders the unit to move to 'position'. Target", "return self._bot_object._distance_squared_unit_to_unit(self, p) ** 0.5 return self._bot_object.distance_math_hypot(self.position_tuple, p) def target_in_range(self,", "UNIT_ORACLE, TARGET_GROUND, TARGET_AIR, TARGET_BOTH, IS_SNAPSHOT, IS_VISIBLE, IS_MINE, IS_ENEMY, IS_CLOAKED, IS_REVEALED,", "proto?! return bool(self._weapons) or self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE} @property_immutable_cache def", "creep tumor, ravager bile, HT storm if ability_target_type in {Target.Point.value,", "-> Union[int, float]: \"\"\" Returns the dps against air units.", "the unit has the 'mechanical' attribute. \"\"\" return IS_MECHANICAL in", "upgrades. 
\"\"\" if self.can_attack_ground: weapon = next((weapon for weapon in", "Includes the target's radius when calculating distance to target. :param", "This ability redirects to 'AbilityId.ATTACK' \"\"\" return self(AbilityId.SCAN_MOVE, *args, **kwargs)", "passengers(self) -> Set[Unit]: \"\"\" Returns the units inside a Bunker,", "form: Unit(name='SCV', tag=4396941328). \"\"\" return f\"Unit(name={self.name !r}, tag={self.tag})\" @property_immutable_cache def", "Enemy buildings that have been scouted and are in the", "because immovable units (sieged tank, planetary fortress etc.) have a", "return not IS_CARRYING_VESPENE.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_resource(self) -> bool: \"\"\" Checks", "self._weapons if weapon.type in TARGET_GROUND), None) if weapon: return weapon.range", "# TODO: a function that checks if this unit is", "= self._bot_object._game_data.abilities[ability_id.value]._proto.target # For casting abilities that target other units,", "detection distance of the unit. \"\"\" return self._proto.detect_range @property_immutable_cache def", "False) -> UnitCommand: \"\"\" Orders unit to build another 'unit'", "is carrying vespene gas. \"\"\" return not IS_CARRYING_VESPENE.isdisjoint(self.buffs) @property_immutable_cache def", "unit to attack. Target can be a Unit or Point2.", "@property def movement_speed(self) -> Union[int, float]: \"\"\" Returns the movement", "research (like HT storm). :param ability_id: :param target: :param bonus_distance:", "-> Union[int, float]: return self._proto.radar_range @property def is_selected(self) -> bool:", "0 @property_immutable_cache def bonus_damage(self): \"\"\" Returns a tuple of form", "the unit is currently selected. \"\"\" return self._proto.is_selected @property def", "in CAN_BE_ATTACKED @property_immutable_cache def buffs(self) -> Set: \"\"\" Returns the", "in sc2/ids/unit_typeid. Caches all type_ids of the same unit type.", "gets new orders. 
:param queue: \"\"\" return self(AbilityId.HOLDPOSITION, queue=queue) def", "\"\"\" Returns direction the unit is facing as a float", "without energy. \"\"\" if self._proto.energy_max == 0: return 0 return", "@property_immutable_cache def _weapons(self): \"\"\" Returns the weapons of the unit.", "in TARGET_BOTH for weapon in self._weapons) return False @property_immutable_cache def", "units. Does not include upgrades. \"\"\" if self.can_attack_ground: weapon =", "is in direction of x axis.\"\"\" return self._proto.facing # TODO:", "self._proto.radar_range @property def is_selected(self) -> bool: \"\"\" Checks if the", "or vespene geyser to mine. Only works for own units.", "\"\"\" return self.is_using_ability(IS_RETURNING) @property_immutable_cache def is_collecting(self) -> bool: \"\"\" Checks", "TODO: Consider units with ability attacks (Oracle, Baneling) or multiple", "a mineral field. \"\"\" return self._proto.mineral_contents @property def vespene_contents(self) ->", "# PROPERTIES BELOW THIS COMMENT ARE NOT POPULATED FOR SNAPSHOTS", "Used by property_immutable_cache self.cache = {} def __repr__(self) -> str:", "int]: \"\"\" How much cargo space is available at maximum.", "space is currently left in the unit. \"\"\" return self._proto.cargo_space_max", "warpgate_abilities, TargetType, Target from .ids.ability_id import AbilityId from .ids.buff_id import", "unit can have. Returns 0 for units without energy. \"\"\"", "casting abilities on the ground, like queen creep tumor, ravager", "a unit is gathering or returning. Only works for own", "float]: \"\"\" Returns the movement speed of the unit. 
Does", "-> Union[float, int]: \"\"\" Returns the amount of cargo space", "BuffId), f\"{buff} is no BuffId\" return buff in self.buffs def", "def assigned_harvesters(self) -> int: \"\"\" Returns the number of workers", "progress def __repr__(self) -> str: return f\"UnitOrder({self.ability}, {self.target}, {self.progress})\" class", "isinstance( target, (Point2, tuple) ): return ( self._bot_object._distance_pos_to_pos(self.position_tuple, target) <=", "of frames of the visible timer bar. # NOTE: Returns", "target: Unit, bonus_distance: Union[int, float] = 0) -> bool: \"\"\"", "self._proto.radius @property def build_progress(self) -> Union[int, float]: \"\"\" Returns completion", "UnitCommand: \"\"\" Orders a unit to stop moving. It will", "{unit.tag for unit in self._proto.passengers} @property def cargo_used(self) -> Union[float,", "is_constructing_scv(self) -> bool: \"\"\" Checks if the unit is an", "@property def radar_range(self) -> Union[int, float]: return self._proto.radar_range @property def", "for weapon in self._weapons) return False @property_immutable_cache def air_dps(self) ->", "self._proto.is_active # PROPERTIES BELOW THIS COMMENT ARE NOT POPULATED FOR", "in a circle. :param position: :param queue: \"\"\" return self(AbilityId.PATROL,", "self._proto.tag @property def is_structure(self) -> bool: \"\"\" Checks if the", "@property def mineral_contents(self) -> int: \"\"\" Returns the amount of", "the unit will patrol in a circle. :param position: :param", "for non-protoss units. \"\"\" return self._proto.shield_max @property def shield_percentage(self) ->", "owner of the unit. 
This is a value of 1", "return -1 @property def engaged_target_tag(self) -> int: # TODO What", "-> bool: \"\"\" Checks if the unit has the 'massive'", "self(self._bot_object._game_data.units[unit.value].creation_ability.id, target=position, queue=queue) def research(self, upgrade: UpgradeId, queue: bool =", "IS_LIGHT in self._type_data.attributes @property def is_armored(self) -> bool: \"\"\" Checks", "Returns the 3d position of the unit. \"\"\" return Point3.from_proto(self._proto.pos)", "if the unit is controlled by the bot. \"\"\" return", "if the unit is idle. \"\"\" warnings.warn(\"noqueue will be removed", "instead\", DeprecationWarning, stacklevel=2) return self.is_idle @property def is_idle(self) -> bool:", "\"\"\" Returns the amount of energy the unit has. Returns", "shield_upgrade_level(self) -> int: \"\"\" Returns the upgrade level of the", "# NOTE: Returns 0 for units without a shield. \"\"\"", "Returns string of this form: Unit(name='SCV', tag=4396941328). \"\"\" return f\"Unit(name={self.name", "\"\"\" return self(AbilityId.EFFECT_REPAIR, target=repair_target, queue=queue) def __hash__(self): return self.tag def", "@property_immutable_cache def is_constructing_scv(self) -> bool: \"\"\" Checks if the unit", "@property def vespene_contents(self) -> int: \"\"\" Returns the amount of", "own units. \"\"\" return self.is_using_ability(IS_PATROLLING) @property_immutable_cache def is_gathering(self) -> bool:", "self._type_data._proto.sight_range @property def movement_speed(self) -> Union[int, float]: \"\"\" Returns the", "Union[int, float]: \"\"\" Half of unit size. See https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void) \"\"\"", "for non-protoss units. 
\"\"\" return self._proto.shield @property def shield_max(self) ->", "able to cast an ability on the target without checking", "-> int: \"\"\" Returns the ideal harverster count for unit.", "IS_VISIBLE, IS_MINE, IS_ENEMY, IS_CLOAKED, IS_REVEALED, CAN_BE_ATTACKED, IS_CARRYING_MINERALS, IS_CARRYING_VESPENE, IS_CARRYING_RESOURCES, IS_ATTACKING,", "self._type_data.attributes @property def is_armored(self) -> bool: \"\"\" Checks if the", "\"\"\" Checks if a worker is carrying vespene gas. \"\"\"", "has_buff(self, buff: BuffId) -> bool: \"\"\" Checks if unit has", "-> str: \"\"\" Returns the name of the unit. \"\"\"", "Tuple[float, float]: \"\"\" Returns the 2d position of the unit", "of the unit. \"\"\" return self._proto.tag @property def is_structure(self) ->", "queue=queue) def gather(self, target: Unit, queue: bool = False) ->", "unit has buff 'buff'. \"\"\" assert isinstance(buff, BuffId), f\"{buff} is", "is an SCV that is currently building. Only works for", "x axis.\"\"\" return self._proto.facing # TODO: a function that checks", "units without a timer bar. \"\"\" return self._proto.buff_duration_remain @property def", "@property def energy(self) -> Union[int, float]: \"\"\" Returns the amount", "without a timer bar. \"\"\" return self._proto.buff_duration_remain @property def buff_duration_max(self)", "the unit is your own hallucination or detected. \"\"\" return", "equality, e.g. OrbitalCommand is the same as CommandCenter For Hive,", "None @property def noqueue(self) -> bool: \"\"\" Checks if the", "tumor, ravager bile, HT storm if ability_target_type in {Target.Point.value, Target.PointOrUnit.value}", "\"\"\" Checks if unit has an addon attached. \"\"\" return", "percentage of amount of energy the unit has. Returns 0", "available as a snapshot for the bot. 
Enemy buildings that", "import UpgradeId from .ids.unit_typeid import UnitTypeId from .position import Point2,", "# For casting abilities on the ground, like queen creep", "if unit_type not in self._bot_object._game_data.unit_types: self._bot_object._game_data.unit_types[unit_type] = UnitTypeId(unit_type) return self._bot_object._game_data.unit_types[unit_type]", "__repr__(self) -> str: return f\"UnitOrder({self.ability}, {self.target}, {self.progress})\" class Unit: def", "mineral field. \"\"\" return self._proto.mineral_contents @property def vespene_contents(self) -> int:", "'buff'. \"\"\" assert isinstance(buff, BuffId), f\"{buff} is no BuffId\" return", "unit. \"\"\" return Point3.from_proto(self._proto.pos) def distance_to(self, p: Union[Unit, Point2, Point3])", "to return resource. Does not need a 'target'. :param target:", ") from .data import Alliance, Attribute, CloakState, DisplayType, Race, TargetType,", "def weapon_cooldown(self) -> Union[int, float]: \"\"\" Returns the time until", "if self._weapons: return any(weapon.type in TARGET_GROUND for weapon in self._weapons)", "this form: Unit(name='SCV', tag=4396941328). \"\"\" return f\"Unit(name={self.name !r}, tag={self.tag})\" @property_immutable_cache", "if ability is made available through research (like HT storm).", "bonus_distance) ** 2 ) # For casting abilities on the", "weapon. \"\"\" return self._proto.attack_upgrade_level @property def armor_upgrade_level(self) -> int: \"\"\"", "bool = False) -> UnitCommand: \"\"\" Orders a unit to", "bool: \"\"\" Checks if the unit is idle. \"\"\" warnings.warn(\"noqueue", "\"\"\" return self._proto.shield_max @property def shield_percentage(self) -> Union[int, float]: \"\"\"", "units loaded. 
\"\"\" return bool(self._proto.cargo_space_taken) @property def cargo_size(self) -> Union[float,", "the 3d distance, use unit.position3d.distance_to(p) :param p: \"\"\" if isinstance(p,", "UNIT_BATTLECRUISER: return True if self._weapons: return any(weapon.type in TARGET_BOTH for", "Does not include shields. \"\"\" if self._proto.health_max == 0: return", "def is_moving(self) -> bool: \"\"\" Checks if the unit is", "if it has too few mining.\"\"\" return self._proto.assigned_harvesters - self._proto.ideal_harvesters", "Returns the sight range of the unit. \"\"\" return self._type_data._proto.sight_range", "the unit as tuple without conversion to Point2. \"\"\" return", "= False) -> UnitCommand: \"\"\" Orders unit to train another", "\"\"\" return self(AbilityId.STOP, queue=queue) def patrol(self, position: Union[Point2, Point3], queue:", "patrol(self, position: Union[Point2, Point3], queue: bool = False) -> UnitCommand:", "self._bot_object._game_data.unit_types[unit_type] @property_immutable_cache def _type_data(self) -> \"UnitTypeData\": \"\"\" Provides the unit", "use unit.position3d.distance_to(p) :param p: \"\"\" if isinstance(p, Unit): return self._bot_object._distance_squared_unit_to_unit(self,", "Orders a unit to stop, but can start to move", "-> bool: \"\"\" Checks if the unit can attack both", "-> Optional[Union[int, Point2]]: \"\"\" Returns the target tag (if it", "if the unit is detected by a sensor tower. \"\"\"", "units without energy. \"\"\" if self._proto.energy_max == 0: return 0", "\"\"\" return self._type_data.has_vespene @property def health(self) -> Union[int, float]: \"\"\"", "gas extraction building. \"\"\" return self._type_data.has_vespene @property def health(self) ->", "float]: \"\"\" Returns the shield points the unit has. 
Returns", "the cloak status of the unit.\"\"\" return self._proto.display_type == IS_VISIBLE", "Point2 (if it is a Position) from the first order,", "mining.\"\"\" return self._proto.assigned_harvesters - self._proto.ideal_harvesters @property_immutable_cache def weapon_cooldown(self) -> Union[int,", "Returns the dps against air units. Does not include upgrades.", "in {Target.Point.value, Target.PointOrUnit.value} and isinstance( target, (Point2, tuple) ): return", "@property def is_ready(self) -> bool: \"\"\" Checks if the unit", "unit is gathering or returning. Only works for own units.", "self._type_data.attributes @property def is_psionic(self) -> bool: \"\"\" Checks if the", "the unit. \"\"\" return self._type_data.name @property def race(self) -> Race:", "-> int: \"\"\" Returns the amount of gas remaining in", "@property def buff_duration_remain(self) -> int: \"\"\" Returns the amount of", "-> bool: \"\"\" Checks if this unit has any units", "point, the unit will patrol in a circle. :param position:", "against air units. Does not include upgrades. \"\"\" if self.can_attack_air:", "the unit \"\"\" return Race(self._type_data._proto.race) @property def tag(self) -> int:", "\"\"\" pass @property def radius(self) -> Union[int, float]: \"\"\" Half", "dps against ground units. Does not include upgrades. \"\"\" if", "two player game. \"\"\" return self._proto.owner @property def position_tuple(self) ->", "return [UnitOrder.from_proto(order, self._bot_object) for order in self._proto.orders] @property_immutable_cache def order_target(self)", "self._proto.armor_upgrade_level @property def shield_upgrade_level(self) -> int: \"\"\" Returns the upgrade", "-> Set[int]: \"\"\" Returns the tags of the units inside", "cloak(self) -> CloakState: \"\"\" Returns cloak state. 
See https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95 \"\"\"", "def is_powered(self) -> bool: \"\"\" Checks if the unit is", "self._proto.cloak in IS_CLOAKED @property def is_revealed(self) -> bool: \"\"\" Checks", "int]: \"\"\" Returns how much cargo space is currently left", "bool: \"\"\" Checks if the unit can attack ground units.", "return self._bot_object.distance_math_hypot(self.position_tuple, p) def target_in_range(self, target: Unit, bonus_distance: Union[int, float]", "for order in self._proto.orders] @property_immutable_cache def order_target(self) -> Optional[Union[int, Point2]]:", "0 for units without a timer bar. \"\"\" return self._proto.buff_duration_remain", "unit) or Point2. :param position: :param queue: \"\"\" return self(AbilityId.MOVE_MOVE,", "2 in a two player game. \"\"\" return self._proto.owner @property", "distance to target. :param target: :param bonus_distance: \"\"\" # TODO:", "armor of the unit. Does not include upgrades \"\"\" return", "IS_COLLECTING, IS_CONSTRUCTING_SCV, IS_REPAIRING, IS_DETECTOR, UNIT_PHOTONCANNON, UNIT_COLOSSUS, ) from .data import", "the percentage of amount of energy the unit has. Returns", "other_unit: :param angle_error: \"\"\" pass @property def radius(self) -> Union[int,", "is_detector(self) -> bool: \"\"\" Checks if the unit is a", "\"\"\" warnings.warn(\"noqueue will be removed soon, please use is_idle instead\",", "\"\"\" Checks if a unit is gathering or returning. 
Only", "self._proto.shield_upgrade_level @property def buff_duration_remain(self) -> int: \"\"\" Returns the amount", ":param repair_target: :param queue: \"\"\" return self(AbilityId.EFFECT_REPAIR, target=repair_target, queue=queue) def", "\"\"\" Returns the set of current buffs the unit has.", "For SCV, this returns None \"\"\" return self._type_data.tech_alias @property def", "self._weapons if weapon.type in TARGET_GROUND), None) if weapon: return (weapon.damage", "\"\"\" Checks if the unit is currently selected. \"\"\" return", "= False) -> UnitCommand: \"\"\" Order an SCV or MULE", "self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE} @property_immutable_cache def can_attack_both(self) -> bool: \"\"\"", "unit to stop moving. It will not move until it", "the command starts and the target position. Can be queued", "range of the unit. \"\"\" return self._type_data._proto.sight_range @property def movement_speed(self)", "ground_range(self) -> Union[int, float]: \"\"\" Returns the range against ground", "structure. \"\"\" return IS_STRUCTURE in self._type_data.attributes @property def is_light(self) ->", "to townhall. Only works for own units. \"\"\" return self.is_using_ability(IS_RETURNING)", "For SCV, this returns None \"\"\" return self._type_data.unit_alias @property_immutable_cache def", "take up more than one space. \"\"\" return self._proto.cargo_space_taken @property", "if weapon: return (weapon.damage * weapon.attacks) / weapon.speed return 0", "(proto.target_world_space_pos if proto.HasField(\"target_world_space_pos\") else proto.target_unit_tag), proto.progress, ) def __init__(self, ability:", "\"\"\" self._proto = proto_data self._bot_object = bot_object # Used by", "self.type_id == UNIT_BATTLECRUISER: return 6 if self.can_attack_air: weapon = next((weapon", "its way. 
:param target: :param queue: \"\"\" return self(AbilityId.ATTACK, target=target,", "class UnitOrder: @classmethod def from_proto(cls, proto, bot_object: BotAI): return cls(", "has the 'massive' attribute. \"\"\" return IS_MASSIVE in self._type_data.attributes @property", "If the last point is the same as the starting", "tag of the addon of unit. \"\"\" return self._proto.add_on_tag @property", "PROPERTIES BELOW THIS COMMENT ARE NOT POPULATED FOR ENEMIES @property_mutable_cache", "(self.type_id in IS_DETECTOR or self.type_id == UNIT_PHOTONCANNON and self.is_powered) @property", "target=target, queue=queue) def return_resource(self, target: Unit = None, queue: bool", "using one of the given abilities. Only works for own", "Point3: \"\"\" Returns the 3d position of the unit. \"\"\"", "def alliance(self) -> Alliance: \"\"\" Returns the team the unit", "Checks if the unit is flying. \"\"\" return self._proto.is_flying or", "if the unit can attack both ground and air units.", "@property_immutable_cache def position3d(self) -> Point3: \"\"\" Returns the 3d position", "unit is controlled by the bot. \"\"\" return self._proto.alliance ==", "an addon attached. \"\"\" return bool(self._proto.add_on_tag) @property_immutable_cache def add_on_land_position(self) ->", "movement_speed(self) -> Union[int, float]: \"\"\" Returns the movement speed of", "cargo_size(self) -> Union[float, int]: \"\"\" Returns the amount of cargo", "Checks if this unit has any units loaded. \"\"\" return", "-> bool: \"\"\" Checks if the unit is controlled by", "a two player game. \"\"\" return self._proto.owner @property def position_tuple(self)", "\"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, target=position, queue=queue) def research(self, upgrade: UpgradeId, queue:", "energy the unit can have. Returns 0 for units without", "cooldown (like stalker blink) or if ability is made available", "game. 
\"\"\" return self._proto.owner @property def position_tuple(self) -> Tuple[float, float]:", "@property def cargo_left(self) -> Union[float, int]: \"\"\" Returns how much", "bool: \"\"\" Checks if a worker or MULE is carrying", "this returns UnitTypeId.OrbitalCommand For SCV, this returns None \"\"\" return", "\"\"\" Returns the percentage of amount of energy the unit", "Point2]]: \"\"\" Returns the target tag (if it is a", "= False) -> UnitCommand: \"\"\" Orders a unit to gather", "BuffId) -> bool: \"\"\" Checks if unit has buff 'buff'.", "in self._weapons) return False @property_immutable_cache def air_dps(self) -> Union[int, float]:", "Check if the unit is using one of the given", "if self.can_attack_ground: weapon = next((weapon for weapon in self._weapons if", "Returns the owner of the unit. This is a value", "queue: \"\"\" return self(AbilityId.EFFECT_REPAIR, target=repair_target, queue=queue) def __hash__(self): return self.tag", "return True if self._weapons: return any(weapon.type in TARGET_GROUND for weapon", "True if the unit is your own hallucination or detected.", "of the given abilities. Only works for own units. \"\"\"", "ground appear this way. \"\"\" return self._proto.display_type == IS_SNAPSHOT @property", "amount of cargo space the unit needs. 
\"\"\" return self._type_data.cargo_size", "@property def owner_id(self) -> int: \"\"\" Returns the owner of", "self._proto.build_progress @property def is_ready(self) -> bool: \"\"\" Checks if the", "IS_REVEALED, CAN_BE_ATTACKED, IS_CARRYING_MINERALS, IS_CARRYING_VESPENE, IS_CARRYING_RESOURCES, IS_ATTACKING, IS_PATROLLING, IS_GATHERING, IS_RETURNING, IS_COLLECTING,", "target) <= (cast_range + self.radius + target.radius + bonus_distance) **", "Deprecated: This ability redirects to 'AbilityId.ATTACK' \"\"\" return self(AbilityId.SCAN_MOVE, *args,", "def cargo_max(self) -> Union[float, int]: \"\"\" How much cargo space", "has too many harvesters mining, a negative int if it", "def research(self, upgrade: UpgradeId, queue: bool = False) -> UnitCommand:", "def cargo_left(self) -> Union[float, int]: \"\"\" Returns how much cargo", "a position will make the unit move there and attack", "self(self._bot_object._game_data.units[unit.value].creation_ability.id, queue=queue) def build(self, unit: UnitTypeId, position: Union[Point2, Point3] =", ":param queue: \"\"\" return self(AbilityId.HOLDPOSITION, queue=queue) def stop(self, queue: bool", "unit is on its way to a mineral field or", "\"\"\" Building tech equality, e.g. OrbitalCommand is the same as", "dps against air units. Does not include upgrades. \"\"\" if", "if unit does 'bonus damage' against 'armor type'. Possible armor", "cast range\" ability_target_type = self._bot_object._game_data.abilities[ability_id.value]._proto.target # For casting abilities that", "int]: \"\"\" Returns how much cargo space is currently used", "@property def facing(self) -> Union[int, float]: \"\"\" Returns direction the", "-> bool: \"\"\" Checks if the unit is currently selected.", "Union[int, float]: \"\"\" Returns the maximum amount of energy the", "not target.is_flying: unit_attack_range = self.ground_range elif self.can_attack_air and (target.is_flying or", "unit. 
\"\"\" return Point2.from_proto(self._proto.pos) @property_immutable_cache def position3d(self) -> Point3: \"\"\"", "return self._proto.detect_range @property_immutable_cache def is_detector(self) -> bool: \"\"\" Checks if", "def is_flying(self) -> bool: \"\"\" Checks if the unit is", "without checking ability cooldown (like stalker blink) or if ability", "Checks if the unit is currently training or researching. \"\"\"", "@property_immutable_cache def weapon_cooldown(self) -> Union[int, float]: \"\"\" Returns the time", "target) <= (self.radius + target.radius + unit_attack_range + bonus_distance) **", "def mineral_contents(self) -> int: \"\"\" Returns the amount of minerals", "the dps against air units. Does not include upgrades. \"\"\"", "@property def is_burrowed(self) -> bool: \"\"\" Checks if the unit", "a unit is on its way to a mineral field", "\"\"\" Checks if the unit transforming. Only works for own", "non-protoss units. \"\"\" if self._proto.shield_max == 0: return 0 return", "f\"{buff} is no BuffId\" return buff in self.buffs def train(self,", "def tag(self) -> int: \"\"\" Returns the unique tag of", "@property def is_on_screen(self) -> bool: \"\"\" Checks if the unit", ":param target: :param queue: \"\"\" return self(AbilityId.HARVEST_RETURN, target=target, queue=queue) def", "property_immutable_cache self.cache = {} def __repr__(self) -> str: \"\"\" Returns", "unit has the 'massive' attribute. \"\"\" return IS_MASSIVE in self._type_data.attributes", "or if ability is made available through research (like HT", "a float in range [0,2π). 0 is in direction of", "has. \"\"\" return {BuffId(buff_id) for buff_id in self._proto.buff_ids} @property_immutable_cache def", "is currently selected. 
\"\"\" return self._proto.is_selected @property def is_on_screen(self) ->", "in self._proto.orders] @property_immutable_cache def order_target(self) -> Optional[Union[int, Point2]]: \"\"\" Returns", "def __repr__(self) -> str: return f\"UnitOrder({self.ability}, {self.target}, {self.progress})\" class Unit:", "use is_idle instead\", DeprecationWarning, stacklevel=2) return self.is_idle @property def is_idle(self)", "Can be queued up to seven patrol points. If the", "and p. To calculate the 3d distance, use unit.position3d.distance_to(p) :param", "storm if ability_target_type in {Target.Point.value, Target.PointOrUnit.value} and isinstance( target, (Point2,", "attacking enemy units on higher, not visible ground appear this", "self._proto.assigned_harvesters - self._proto.ideal_harvesters @property_immutable_cache def weapon_cooldown(self) -> Union[int, float]: \"\"\"", "belongs to. \"\"\" return self._proto.alliance @property def is_mine(self) -> bool:", "the unit. \"\"\" return self._type_data._proto.sight_range @property def movement_speed(self) -> Union[int,", "ability on the target without checking ability cooldown (like stalker", "bool: \"\"\" Checks if unit has an addon attached. \"\"\"", "the unit. Note that some units take up more than", "remaining. You can't build extractors on empty geysers. \"\"\" return", "bile, HT storm if ability_target_type in {Target.Point.value, Target.PointOrUnit.value} and isinstance(", "def armor_upgrade_level(self) -> int: \"\"\" Returns the upgrade level of", "False if isinstance(abilities, AbilityId): abilities = {abilities} return self.orders[0].ability.id in", "Returns the 2d position of the unit. \"\"\" return Point2.from_proto(self._proto.pos)", "ground units. Does not include upgrades. \"\"\" if self.can_attack_ground: weapon", "self.type_id == UNIT_PHOTONCANNON and self.is_powered) @property def radar_range(self) -> Union[int,", "returns None if the unit is idle \"\"\" if self.orders:", "the weapons of the unit. 
\"\"\" try: return self._type_data._proto.weapons except:", "the unit has the 'armored' attribute. \"\"\" return IS_ARMORED in", "and (self.type_id in IS_DETECTOR or self.type_id == UNIT_PHOTONCANNON and self.is_powered)", "from .bot_ai import BotAI from .game_data import AbilityData class UnitOrder:", "-> int: \"\"\" Returns the amount of minerals remaining in", "position: :param queue: \"\"\" return self(AbilityId.MOVE_MOVE, target=position, queue=queue) def scan_move(self,", "it gets new orders. :param queue: \"\"\" return self(AbilityId.HOLDPOSITION, queue=queue)", "NOT POPULATED FOR SNAPSHOTS @property def mineral_contents(self) -> int: \"\"\"", "given abilities. Only works for own units. \"\"\" if not", "against 'armor type'. Possible armor typs are: 'Light', 'Armored', 'Biological',", "in IS_CLOAKED @property def is_revealed(self) -> bool: \"\"\" Checks if", "IS_CARRYING_MINERALS, IS_CARRYING_VESPENE, IS_CARRYING_RESOURCES, IS_ATTACKING, IS_PATROLLING, IS_GATHERING, IS_RETURNING, IS_COLLECTING, IS_CONSTRUCTING_SCV, IS_REPAIRING,", "Returns the maximum amount of frames of the visible timer", "IS_ATTACKING, IS_PATROLLING, IS_GATHERING, IS_RETURNING, IS_COLLECTING, IS_CONSTRUCTING_SCV, IS_REPAIRING, IS_DETECTOR, UNIT_PHOTONCANNON, UNIT_COLOSSUS,", "on the ground, like queen creep tumor, ravager bile, HT", "try: return self._type_data._proto.weapons except: return None @property_immutable_cache def can_attack(self) ->", "any information about the cloak status of the unit.\"\"\" return", "if the unit has the 'mechanical' attribute. \"\"\" return IS_MECHANICAL", "the unit has. 
\"\"\" return {BuffId(buff_id) for buff_id in self._proto.buff_ids}", "target: :param queue: \"\"\" return self(AbilityId.HARVEST_GATHER, target=target, queue=queue) def return_resource(self,", "if self.type_id == UNIT_BATTLECRUISER: return True if self._weapons: return any(weapon.type", "return self._proto.engaged_target_tag # Unit functions def has_buff(self, buff: BuffId) ->", "Checks if the unit can attack at all. \"\"\" #", "can_attack_both(self) -> bool: \"\"\" Checks if the unit can attack", "distance, use unit.position3d.distance_to(p) :param p: \"\"\" if isinstance(p, Unit): return", "weapon.speed return 0 @property_immutable_cache def air_range(self) -> Union[int, float]: \"\"\"", "non-empty vespene geyser or gas extraction building. \"\"\" return self._type_data.has_vespene", "is able to cast an ability on the target without", "Union[int, float]: \"\"\" Returns the maximum health of the unit.", "in a geyser. \"\"\" return self._proto.vespene_contents @property def has_vespene(self) ->", "the unit needs. \"\"\" return self._type_data.cargo_size @property def cargo_max(self) ->", "\"\"\" Checks if a worker is carrying a resource. \"\"\"", "damage, armor type)' if unit does 'bonus damage' against 'armor", "-> Union[int, float]: \"\"\" Returns the sight range of the", "unit has too many harvesters mining, a negative int if", "for n mineral patches on that base.\"\"\" return self._proto.ideal_harvesters @property", "vespene_contents(self) -> int: \"\"\" Returns the amount of gas remaining", "Point3], queue: bool = False) -> UnitCommand: \"\"\" Orders the", "\"\"\" return self._proto.detect_range @property_immutable_cache def is_detector(self) -> bool: \"\"\" Checks", "of the unit. Does not include shields. \"\"\" return self._proto.health", "has no cast range\" ability_target_type = self._bot_object._game_data.abilities[ability_id.value]._proto.target # For casting", "6 if self.can_attack_air: weapon = next((weapon for weapon in self._weapons", "unit. 
Does not include shields. \"\"\" return self._proto.health_max @property def", "type data. \"\"\" return self._bot_object._game_data.units[self._proto.unit_type] @property def name(self) -> str:", "-> Optional[List[UnitTypeId]]: \"\"\" Building tech equality, e.g. OrbitalCommand is the", "is flying. \"\"\" return self._proto.is_flying or self.has_buff(BuffId.GRAVITONBEAM) @property def is_burrowed(self)", "(if it is a Position) from the first order, returns", "Medivac, Nydus, Overlord or WarpPrism. \"\"\" return {unit.tag for unit", "currently used in the unit. Note that some units take", "townhall. Only works for own units. \"\"\" return self.is_using_ability(IS_RETURNING) @property_immutable_cache", "self._proto.display_type == IS_SNAPSHOT @property def is_visible(self) -> bool: \"\"\" Checks", "bool: \"\"\" Checks if the target is in range. Includes", "ability: AbilityData, target, progress: float = None): \"\"\" :param ability:", "cloaked and therefore can be attacked. \"\"\" return self._proto.cloak in", "What does this do? return self._proto.engaged_target_tag # Unit functions def", "the 'biological' attribute. \"\"\" return IS_BIOLOGICAL in self._type_data.attributes @property def", "another 'unit' at 'position'. Usage: self.actions.append(SCV.build(COMMANDCENTER, position)) :param unit: :param", "\"\"\" Returns the upgrade level of the units attack. #", "the amount of energy the unit has. Returns 0 for", "self._weapons) return False @property_immutable_cache def air_dps(self) -> Union[int, float]: \"\"\"", "Any, Dict, List, Optional, Set, Tuple, Union, TYPE_CHECKING from .cache", "target) <= cast_range + self.radius + bonus_distance ) return False", "shield. 
\"\"\" return self._proto.shield_upgrade_level @property def buff_duration_remain(self) -> int: \"\"\"", "bool: \"\"\" Checks if the unit has the 'biological' attribute.", "ARE NOT POPULATED FOR ENEMIES @property_mutable_cache def orders(self) -> List[UnitOrder]:", "def return_resource(self, target: Unit = None, queue: bool = False)", "or returning. Only works for own units. \"\"\" return self.is_using_ability(IS_COLLECTING)", "UnitCommand: \"\"\" Orders the unit to return resource. Does not", "if the unit has the 'light' attribute. \"\"\" return IS_LIGHT", "radius(self) -> Union[int, float]: \"\"\" Half of unit size. See", "mineral patch or a gas extraction building. :param target: :param", "bool(self._proto.add_on_tag) @property_immutable_cache def add_on_land_position(self) -> Point2: \"\"\" If unit is", "the unit is cloaked. \"\"\" return self._proto.cloak in IS_CLOAKED @property", "def noqueue(self) -> bool: \"\"\" Checks if the unit is", "the same as OrbitalCommand For flying OrbitalCommand, this returns UnitTypeId.OrbitalCommand", "is_attacking(self) -> bool: \"\"\" Checks if the unit is attacking.", "patrol between position it has when the command starts and", "attribute. \"\"\" return IS_LIGHT in self._type_data.attributes @property def is_armored(self) ->", "abilities: Union[AbilityId, Set[AbilityId]]) -> bool: \"\"\" Check if the unit", "vespene geyser to mine. Only works for own units. \"\"\"", "has vision of the position of the unit! It does", "weapon.type in TARGET_AIR), None) if weapon: return (weapon.damage * weapon.attacks)", "def cargo_size(self) -> Union[float, int]: \"\"\" Returns the amount of", "the range against air units. Does not include upgrades. \"\"\"", "Only works for own units. \"\"\" return self.is_using_ability(IS_COLLECTING) @property_immutable_cache def", "own units. \"\"\" return self.is_using_ability(IS_COLLECTING) @property_immutable_cache def is_constructing_scv(self) -> bool:", "a mineral field. 
\"\"\" return self._type_data.has_minerals @property def is_vespene_geyser(self) ->", "attribute. \"\"\" return IS_MASSIVE in self._type_data.attributes @property def is_psionic(self) ->", "if self.can_attack_ground and not target.is_flying: unit_attack_range = self.ground_range elif self.can_attack_air", "if the unit is currently selected. \"\"\" return self._proto.is_selected @property", "the position of the unit! It does not give any", "Union[int, float]: \"\"\" Returns the percentage of health the unit", "\"\"\" return self._proto.energy @property def energy_max(self) -> Union[int, float]: \"\"\"", "to 'AbilityId.ATTACK' \"\"\" return self(AbilityId.SCAN_MOVE, *args, **kwargs) def hold_position(self, queue:", "== UNIT_ORACLE: return 4 if self.type_id == UNIT_BATTLECRUISER: return 6", "@property_immutable_cache def position(self) -> Point2: \"\"\" Returns the 2d position", "return self._proto.shield_upgrade_level @property def buff_duration_remain(self) -> int: \"\"\" Returns the", "for an ability ({ability_id}) that has no cast range\" ability_target_type", "self._proto.energy / self._proto.energy_max @property def is_snapshot(self) -> bool: \"\"\" Checks", "self.air_range else: return False return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (self.radius", "if the unit is burrowed. \"\"\" return self._proto.is_burrowed @property def", "proto.progress, ) def __init__(self, ability: AbilityData, target, progress: float =", "units. \"\"\" return self.is_using_ability(AbilityId.MOVE) @property_immutable_cache def is_attacking(self) -> bool: \"\"\"", "Checks if a worker is carrying a resource. \"\"\" return", "is currently training or researching. \"\"\" return self._proto.is_active # PROPERTIES", "own hallucination or detected. \"\"\" return self._proto.is_hallucination @property def attack_upgrade_level(self)", "\"\"\" return self._proto.is_active # PROPERTIES BELOW THIS COMMENT ARE NOT", "Returns the weapons of the unit. 
\"\"\" try: return self._type_data._proto.weapons", "self.is_using_ability(IS_ATTACKING) @property_immutable_cache def is_patrolling(self) -> bool: \"\"\" Checks if a", "BuffId\" return buff in self.buffs def train(self, unit: UnitTypeId, queue:", "Tuple, Union, TYPE_CHECKING from .cache import property_immutable_cache, property_mutable_cache from .constants", "the given abilities. Only works for own units. \"\"\" if", "\"\"\" Returns the detection distance of the unit. \"\"\" return", "UNIT_COLOSSUS): unit_attack_range = self.air_range else: return False return ( self._bot_object._distance_squared_unit_to_unit(self,", "Checks if the unit is detected by a sensor tower.", "stop(self, queue: bool = False) -> UnitCommand: \"\"\" Orders a", "target.radius + bonus_distance) ** 2 ) # For casting abilities", "for units without energy. \"\"\" return self._proto.energy @property def energy_max(self)", "timer bar. # NOTE: Returns 0 for units without a", "of workers currently gathering resources at a geyser or mining", "def energy_max(self) -> Union[int, float]: \"\"\" Returns the maximum amount", "bool: \"\"\" Checks if the unit is currently selected. \"\"\"", "cast an ability on the target without checking ability cooldown", "mineral_contents(self) -> int: \"\"\" Returns the amount of minerals remaining", "bool: \"\"\" Checks if a worker is carrying vespene gas.", "target tag (if it is a Unit) or Point2 (if", "\"\"\" if self._proto.energy_max == 0: return 0 return self._proto.energy /", "target: :param queue: \"\"\" return self(AbilityId.HARVEST_RETURN, target=target, queue=queue) def move(self,", "if the unit is a structure. \"\"\" return IS_STRUCTURE in", "None \"\"\" return self._type_data.unit_alias @property_immutable_cache def _weapons(self): \"\"\" Returns the", "the tag of the addon of unit. 
\"\"\" return self._proto.add_on_tag", "@property_immutable_cache def is_transforming(self) -> bool: \"\"\" Checks if the unit", "For casting abilities on the ground, like queen creep tumor,", "unit to stop, but can start to move on its", "more than one space. \"\"\" return self._proto.cargo_space_taken @property def has_cargo(self)", "bool: \"\"\" Checks if the unit is completed. \"\"\" return", "float]: \"\"\" Returns the range against air units. Does not", "removed soon, please use is_idle instead\", DeprecationWarning, stacklevel=2) return self.is_idle", "Only works for own units. \"\"\" return self.is_using_ability(IS_REPAIRING) @property def", "@property_immutable_cache def add_on_land_position(self) -> Point2: \"\"\" If unit is addon", "unit has any units loaded. \"\"\" return bool(self._proto.cargo_space_taken) @property def", "Returns the amount of minerals remaining in a mineral field.", "f\"UnitOrder({self.ability}, {self.target}, {self.progress})\" class Unit: def __init__(self, proto_data, bot_object: BotAI):", "until the unit can fire again, returns -1 for units", "units without a timer bar. 
\"\"\" return self._proto.buff_duration_max # PROPERTIES", "cls( bot_object._game_data.abilities[proto.ability_id], (proto.target_world_space_pos if proto.HasField(\"target_world_space_pos\") else proto.target_unit_tag), proto.progress, ) def", "will be removed soon, please use is_idle instead\", DeprecationWarning, stacklevel=2)", ":param position: :param queue: \"\"\" return self(AbilityId.MOVE_MOVE, target=position, queue=queue) def", "If unit is addon (techlab or reactor), returns the position", "gathering resources at a geyser or mining base.\"\"\" return self._proto.assigned_harvesters", "-> UnitCommand: \"\"\" Deprecated: This ability redirects to 'AbilityId.ATTACK' \"\"\"", "cast_range = self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range assert cast_range > 0, f\"Checking for an", "__hash__(self): return self.tag def __eq__(self, other): try: return self.tag ==", "weapon.type in TARGET_GROUND), None) if weapon: return weapon.range return 0", "-> bool: \"\"\" Checks if the unit can attack ground", "screen. \"\"\" return self._proto.is_on_screen @property def is_blip(self) -> bool: \"\"\"", "Does not need a 'target'. 
:param target: :param queue: \"\"\"", "-> UnitCommand: \"\"\" Orders Warpgate to warp in 'unit' at", "bool: \"\"\" Checks if the unit is a non-empty vespene", "return self._proto.alliance @property def is_mine(self) -> bool: \"\"\" Checks if", "in self._type_data.attributes @property def is_psionic(self) -> bool: \"\"\" Checks if", "alliance(self) -> Alliance: \"\"\" Returns the team the unit belongs", "is a value of 1 or 2 in a two", "def is_biological(self) -> bool: \"\"\" Checks if the unit has", "Race(self._type_data._proto.race) @property def tag(self) -> int: \"\"\" Returns the unique", "-> bool: \"\"\" Checks if unit has an addon attached.", ":param queue: \"\"\" return self(AbilityId.ATTACK, target=target, queue=queue) def gather(self, target:", "returns [UnitTypeId.Hatchery, UnitTypeId.Lair] For SCV, this returns None \"\"\" return", "def is_on_screen(self) -> bool: \"\"\" Checks if the unit is", "is_blip(self) -> bool: \"\"\" Checks if the unit is detected", "@property_immutable_cache def is_patrolling(self) -> bool: \"\"\" Checks if a unit", "\"\"\" return self._proto.energy_max @property def energy_percentage(self) -> Union[int, float]: \"\"\"", "units that can't attack. Usage: if unit.weapon_cooldown == 0: self.actions.append(unit.attack(target))", "-> bool: \"\"\" Checks if a worker is carrying a", "positive int if unit has too many harvesters mining, a", "building has to land to connect to addon \"\"\" return", "mineral field. \"\"\" return self._type_data.has_minerals @property def is_vespene_geyser(self) -> bool:", "the team the unit belongs to. \"\"\" return self._proto.alliance @property", "unit has. Returns 0 for units without energy. \"\"\" return", "target=repair_target, queue=queue) def __hash__(self): return self.tag def __eq__(self, other): try:", "one space. \"\"\" return self._proto.cargo_space_taken @property def has_cargo(self) -> bool:", "be a mineral patch or a gas extraction building. 
:param", "build_progress(self) -> Union[int, float]: \"\"\" Returns completion in range [0,1].\"\"\"", "3 for gas buildings, 2*n for n mineral patches on", "0 ) -> bool: \"\"\" Test if a unit is", "move on its own if it is attacked, enemy unit", "flying OrbitalCommand, this returns UnitTypeId.OrbitalCommand For SCV, this returns None", "have. Returns 0 for units without energy. \"\"\" return self._proto.energy_max", "the unit is a non-empty vespene geyser or gas extraction", "unit is patrolling. Only works for own units. \"\"\" return", "bool: \"\"\" Checks if the unit has the 'massive' attribute.", "def from_proto(cls, proto, bot_object: BotAI): return cls( bot_object._game_data.abilities[proto.ability_id], (proto.target_world_space_pos if", "unit has. Returns 0 for units without energy. \"\"\" if", "Union[Point2, Point3], queue: bool = False) -> UnitCommand: \"\"\" Orders", "{BuffId(buff_id) for buff_id in self._proto.buff_ids} @property_immutable_cache def is_carrying_minerals(self) -> bool:", "== 0: return 0 return self._proto.shield / self._proto.shield_max @property def", "amount of energy the unit can have. Returns 0 for", "last point is the same as the starting point, the", "position_tuple(self) -> Tuple[float, float]: \"\"\" Returns the 2d position of", "self._bot_object._distance_squared_unit_to_unit(self, target) <= (cast_range + self.radius + target.radius + bonus_distance)", "self._type_data.cargo_size @property def cargo_max(self) -> Union[float, int]: \"\"\" How much", "def position_tuple(self) -> Tuple[float, float]: \"\"\" Returns the 2d position", "self._proto.buff_duration_max # PROPERTIES BELOW THIS COMMENT ARE NOT POPULATED FOR", "target=target, queue=queue) def move(self, position: Union[Point2, Point3], queue: bool =", "can fire again, returns -1 for units that can't attack.", "buff: BuffId) -> bool: \"\"\" Checks if unit has buff", "Orders Warpgate to warp in 'unit' at 'position'. 
:param unit:", "is_burrowed(self) -> bool: \"\"\" Checks if the unit is burrowed.", "float]: \"\"\" Returns the 2d position of the unit as", "include upgrades. \"\"\" if self.type_id == UNIT_BATTLECRUISER: return True if", "(Point2, tuple) ): return ( self._bot_object._distance_pos_to_pos(self.position_tuple, target) <= cast_range +", "bool: \"\"\" Checks if the unit is revealed or not", "def buff_duration_max(self) -> int: \"\"\" Returns the maximum amount of", "a timer bar. \"\"\" return self._proto.buff_duration_max # PROPERTIES BELOW THIS", "terran building has to land to connect to addon \"\"\"", "@property def alliance(self) -> Alliance: \"\"\" Returns the team the", "False) -> UnitCommand: \"\"\" Orders a unit to gather minerals", "Target can be a Unit (to follow that unit) or", "is returning from mineral field or vespene geyser to deliver", "way to a mineral field or vespene geyser to mine.", "\"\"\" return self._proto.cloak is IS_REVEALED @property def can_be_attacked(self) -> bool:", "property_immutable_cache, property_mutable_cache from .constants import ( transforming, IS_STRUCTURE, IS_LIGHT, IS_ARMORED,", "Union[int, float]: \"\"\" Returns the dps against ground units. Does", "def build_progress(self) -> Union[int, float]: \"\"\" Returns completion in range", "the target tag (if it is a Unit) or Point2", "bool: \"\"\" Checks if the unit is an SCV or", "for own units. \"\"\" return self.is_using_ability(IS_RETURNING) @property_immutable_cache def is_collecting(self) ->", "except: return None @property_immutable_cache def can_attack(self) -> bool: \"\"\" Checks", "to attack. Target can be a Unit or Point2. Attacking", "@property def has_add_on(self) -> bool: \"\"\" Checks if unit has", "Checks if a worker is carrying vespene gas. \"\"\" return", "currently left in the unit. 
\"\"\" return self._proto.cargo_space_max - self._proto.cargo_space_taken", "p) ** 0.5 return self._bot_object.distance_math_hypot(self.position_tuple, p) def target_in_range(self, target: Unit,", "Returns the number of workers currently gathering resources at a", "visible timer bar. # NOTE: Returns 0 for units without", "Unit or Point2. Attacking a position will make the unit", "queue=queue) def scan_move(self, *args, **kwargs) -> UnitCommand: \"\"\" Deprecated: This", "\"\"\" return IS_MASSIVE in self._type_data.attributes @property def is_psionic(self) -> bool:", "a shield. \"\"\" return self._proto.shield_upgrade_level @property def buff_duration_remain(self) -> int:", "next((weapon for weapon in self._weapons if weapon.type in TARGET_GROUND), None)", "in proto?! return bool(self._weapons) or self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE} @property_immutable_cache", "unit is cloaked. \"\"\" return self._proto.cloak in IS_CLOAKED @property def", "patches on that base.\"\"\" return self._proto.ideal_harvesters @property def surplus_harvesters(self) ->", "unit: :param queue: \"\"\" normal_creation_ability = self._bot_object._game_data.units[unit.value].creation_ability.id return self(warpgate_abilities[normal_creation_ability], target=position)", "a value of 1 or 2 in a two player", "the unit is on the screen. \"\"\" return self._proto.is_on_screen @property", "target other units, like transfuse, feedback, snipe, yamato if ability_target_type", "self.type_id == UNIT_BATTLECRUISER: return 6 if self.can_attack_ground: weapon = next((weapon", "== IS_VISIBLE @property def alliance(self) -> Alliance: \"\"\" Returns the", "orders. \"\"\" return [UnitOrder.from_proto(order, self._bot_object) for order in self._proto.orders] @property_immutable_cache", "\"\"\" return self.is_using_ability(IS_ATTACKING) @property_immutable_cache def is_patrolling(self) -> bool: \"\"\" Checks", "Checks if the unit has the 'light' attribute. \"\"\" return", "the unit is a structure. 
\"\"\" return IS_STRUCTURE in self._type_data.attributes", "feedback, snipe, yamato if ability_target_type in {Target.Unit.value, Target.PointOrUnit.value} and isinstance(target,", "progress: float = None): \"\"\" :param ability: :param target: :param", "bonus_damage(self): \"\"\" Returns a tuple of form '(bonus damage, armor", "Checks if the unit has the 'massive' attribute. \"\"\" return", "def can_attack_both(self) -> bool: \"\"\" Checks if the unit can", "@property_immutable_cache def bonus_damage(self): \"\"\" Returns a tuple of form '(bonus", "Checks if the unit is attacking. Only works for own", "the 'armored' attribute. \"\"\" return IS_ARMORED in self._type_data.attributes @property def", "in self._weapons) return False @property_immutable_cache def can_attack_ground(self) -> bool: \"\"\"", "upgrades. \"\"\" if self.type_id == UNIT_BATTLECRUISER: return 6 if self.can_attack_air:", "be powered. \"\"\" return self.is_ready and (self.type_id in IS_DETECTOR or", "bool: \"\"\" Checks if the unit is an SCV that", "position: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, target=position, queue=queue) def research(self,", "self._weapons) return False @property_immutable_cache def ground_dps(self) -> Union[int, float]: \"\"\"", "\"\"\" return self._proto.alliance @property def is_mine(self) -> bool: \"\"\" Checks", "range. Includes the target's radius when calculating distance to target.", "is facing as a float in range [0,2π). 0 is", "the unit has the 'massive' attribute. \"\"\" return IS_MASSIVE in", "geyser. \"\"\" return self._proto.vespene_contents @property def has_vespene(self) -> bool: \"\"\"", "\"\"\" return self._proto.shield @property def shield_max(self) -> Union[int, float]: \"\"\"", "or MULE that is currently repairing. Only works for own", "Checks if the target is in range. 
Includes the target's", "def cargo_used(self) -> Union[float, int]: \"\"\" Returns how much cargo", "Orders unit to build another 'unit' at 'position'. Usage: self.actions.append(SCV.build(COMMANDCENTER,", "unit can attack ground units. \"\"\" if self.type_id in {UNIT_BATTLECRUISER,", "self._weapons if weapon.type in TARGET_AIR), None) if weapon: return weapon.range", "or WarpPrism. \"\"\" return {unit.tag for unit in self._proto.passengers} @property", "a tuple of form '(bonus damage, armor type)' if unit", "False @property def facing(self) -> Union[int, float]: \"\"\" Returns direction", "pass @property def radius(self) -> Union[int, float]: \"\"\" Half of", "is_transforming(self) -> bool: \"\"\" Checks if the unit transforming. Only", "self._bot_object._distance_squared_unit_to_unit(self, p) ** 0.5 return self._bot_object.distance_math_hypot(self.position_tuple, p) def target_in_range(self, target:", "the unit is only available as a snapshot for the", "is_flying(self) -> bool: \"\"\" Checks if the unit is flying.", "IS_DETECTOR or self.type_id == UNIT_PHOTONCANNON and self.is_powered) @property def radar_range(self)", "or gas extraction building. \"\"\" return self._type_data.has_vespene @property def health(self)", "the fog of war or attacking enemy units on higher,", "is idle \"\"\" if self.orders: if isinstance(self.orders[0].target, int): return self.orders[0].target", "@property def cargo_size(self) -> Union[float, int]: \"\"\" Returns the amount", "\"\"\" return IS_STRUCTURE in self._type_data.attributes @property def is_light(self) -> bool:", "Returns the upgrade level of the units attack. # NOTE:", "-> List[UnitOrder]: \"\"\" Returns the a list of the current", "-> bool: \"\"\" Checks if the unit is only available", "planetary fortress etc.) have a little lower range than this", "if a unit is gathering or returning. Only works for", "'armored' attribute. 
\"\"\" return IS_ARMORED in self._type_data.attributes @property def is_biological(self)", "\"\"\" return self._proto.is_blip @property def is_powered(self) -> bool: \"\"\" Checks", "isinstance(abilities, AbilityId): abilities = {abilities} return self.orders[0].ability.id in abilities @property_immutable_cache", "-1 for units that can't attack. Usage: if unit.weapon_cooldown ==", "UnitCommand: \"\"\" Orders unit to build another 'unit' at 'position'.", "TargetType, warpgate_abilities, TargetType, Target from .ids.ability_id import AbilityId from .ids.buff_id", "is_biological(self) -> bool: \"\"\" Checks if the unit has the", "IS_SNAPSHOT, IS_VISIBLE, IS_MINE, IS_ENEMY, IS_CLOAKED, IS_REVEALED, CAN_BE_ATTACKED, IS_CARRYING_MINERALS, IS_CARRYING_VESPENE, IS_CARRYING_RESOURCES,", "attacked, enemy unit is in range or other friendly units", "visible ground appear this way. \"\"\" return self._proto.display_type == IS_SNAPSHOT", "def is_snapshot(self) -> bool: \"\"\" Checks if the unit is", "self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE}: return True if self._weapons: return any(weapon.type", "buff_duration_remain(self) -> int: \"\"\" Returns the amount of remaining frames", ":param position: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, target=position, queue=queue) def", "Point2. :param position: :param queue: \"\"\" return self(AbilityId.MOVE_MOVE, target=position, queue=queue)", "unit is visible for the bot. NOTE: This means the", "without conversion to Point2. 
\"\"\" return self._proto.pos.x, self._proto.pos.y @property_immutable_cache def", "-> Set: \"\"\" Returns the set of current buffs the", "UNIT_PHOTONCANNON, UNIT_COLOSSUS, ) from .data import Alliance, Attribute, CloakState, DisplayType,", "\"\"\" return Point2.from_proto(self._proto.pos) @property_immutable_cache def position3d(self) -> Point3: \"\"\" Returns", "returns None \"\"\" return self._type_data.tech_alias @property def unit_alias(self) -> Optional[UnitTypeId]:", "0 for units without a timer bar. \"\"\" return self._proto.buff_duration_max", "p: \"\"\" if isinstance(p, Unit): return self._bot_object._distance_squared_unit_to_unit(self, p) ** 0.5", "revealed. \"\"\" return self._proto.cloak is IS_REVEALED @property def can_be_attacked(self) ->", "\"\"\" Returns the percentage of health the unit has. Does", "air attack at all. Does not include upgrades. \"\"\" if", "Returns the armor of the unit. Does not include upgrades", "position: Union[Point2, Point3], queue: bool = False) -> UnitCommand: \"\"\"", "is your own hallucination or detected. \"\"\" return self._proto.is_hallucination @property", "(to follow that unit) or Point2. :param position: :param queue:", "self._type_data.attributes @property def is_biological(self) -> bool: \"\"\" Checks if the", "@property def assigned_harvesters(self) -> int: \"\"\" Returns the number of", "ability cooldown (like stalker blink) or if ability is made", "MULE to repair. :param repair_target: :param queue: \"\"\" return self(AbilityId.EFFECT_REPAIR,", "unit has the 'light' attribute. \"\"\" return IS_LIGHT in self._type_data.attributes", "[UnitTypeId.Hatchery, UnitTypeId.Lair] For SCV, this returns None \"\"\" return self._type_data.tech_alias", "units. \"\"\" if self.type_id == UNIT_BATTLECRUISER: return True if self._weapons:", "ability_id: :param target: :param bonus_distance: \"\"\" cast_range = self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range assert", "powered. 
\"\"\" return self.is_ready and (self.type_id in IS_DETECTOR or self.type_id", "much cargo space is currently used in the unit. Note", "self._proto.cargo_space_taken @property def assigned_harvesters(self) -> int: \"\"\" Returns the number", "it has too few mining.\"\"\" return self._proto.assigned_harvesters - self._proto.ideal_harvesters @property_immutable_cache", "def has_add_on(self) -> bool: \"\"\" Checks if unit has an", "Point2.from_proto(self.orders[0].target) return None @property def noqueue(self) -> bool: \"\"\" Checks", "Warpgate to warp in 'unit' at 'position'. :param unit: :param", "return self._proto.pos.x, self._proto.pos.y @property_immutable_cache def position(self) -> Point2: \"\"\" Returns", "if a worker is carrying vespene gas. \"\"\" return not", "\"\"\" return IS_LIGHT in self._type_data.attributes @property def is_armored(self) -> bool:", "the time until the unit can fire again, returns -1", "ravager bile, HT storm if ability_target_type in {Target.Point.value, Target.PointOrUnit.value} and", "field. \"\"\" return self._proto.mineral_contents @property def vespene_contents(self) -> int: \"\"\"", "attack(self, target: Union[Unit, Point2, Point3], queue: bool = False) ->", "self._proto.health @property def health_max(self) -> Union[int, float]: \"\"\" Returns the", "tower. \"\"\" return self._proto.is_blip @property def is_powered(self) -> bool: \"\"\"", "for own units. \"\"\" return self.is_using_ability(IS_CONSTRUCTING_SCV) @property_immutable_cache def is_transforming(self) ->", "are in the fog of war or attacking enemy units", "\"\"\" Returns the shield points the unit has. 
Returns 0", "return False @property_immutable_cache def can_attack_ground(self) -> bool: \"\"\" Checks if", "is_mechanical(self) -> bool: \"\"\" Checks if the unit has the", "from .ids.ability_id import AbilityId from .ids.buff_id import BuffId from .ids.upgrade_id", "unit_type not in self._bot_object._game_data.unit_types: self._bot_object._game_data.unit_types[unit_type] = UnitTypeId(unit_type) return self._bot_object._game_data.unit_types[unit_type] @property_immutable_cache", "True if self._weapons: return any(weapon.type in TARGET_BOTH for weapon in", "target: Union[Unit, Point2], bonus_distance: float = 0 ) -> bool:", "target, (Point2, tuple) ): return ( self._bot_object._distance_pos_to_pos(self.position_tuple, target) <= cast_range", "the unit is a detector. Has to be completed in", "else: return Point2.from_proto(self.orders[0].target) return None @property def noqueue(self) -> bool:", "the space. :param queue: \"\"\" return self(AbilityId.STOP, queue=queue) def patrol(self,", "bool: \"\"\" Checks if the unit is revealed. \"\"\" return", "Union[int, float]: \"\"\" Returns the percentage of shield points the", "Checks if a worker or MULE is carrying (gold-)minerals. \"\"\"", "\"\"\" Returns the amount of gas remaining in a geyser.", "Returns the upgrade level of the units armor. \"\"\" return", "of the unit. This is a value of 1 or", "is currently building. Only works for own units. \"\"\" return", "is burrowed. \"\"\" return self._proto.is_burrowed @property def is_hallucination(self) -> bool:", "return False if isinstance(abilities, AbilityId): abilities = {abilities} return self.orders[0].ability.id", "/ weapon.speed return 0 @property_immutable_cache def air_range(self) -> Union[int, float]:", "units with ability attacks (Oracle, Baneling) or multiple attacks (Thor).", "def radar_range(self) -> Union[int, float]: return self._proto.radar_range @property def is_selected(self)", "repairing. Only works for own units. 
\"\"\" return self.is_using_ability(IS_REPAIRING) @property", "if a worker or MULE is carrying (gold-)minerals. \"\"\" return", "self._bot_object._game_data.units[unit.value].creation_ability.id return self(warpgate_abilities[normal_creation_ability], target=position) def attack(self, target: Union[Unit, Point2, Point3],", "unit def is_facing_unit(self, other_unit: Unit, angle_error: float = 1e-3) ->", "to deliver resources to townhall. Only works for own units.", "ability redirects to 'AbilityId.ATTACK' \"\"\" return self(AbilityId.SCAN_MOVE, *args, **kwargs) def", "BELOW THIS COMMENT ARE NOT POPULATED FOR SNAPSHOTS @property def", "a unit to stop moving. It will not move until", "unit is a non-empty vespene geyser or gas extraction building.", "the unit is idle \"\"\" if self.orders: if isinstance(self.orders[0].target, int):", "-> bool: \"\"\" Checks if the unit is hostile. \"\"\"", "\"\"\" Checks if the target is in range. Includes the", "Checks if a unit is patrolling. Only works for own", "the unit transforming. Only works for own units. \"\"\" return", "unit is facing another unit def is_facing_unit(self, other_unit: Unit, angle_error:", "up more than one space. \"\"\" return self._proto.cargo_space_taken @property def", "has when the command starts and the target position. Can", "COMMENT ARE NOT POPULATED FOR ENEMIES @property_mutable_cache def orders(self) ->", "{abilities} return self.orders[0].ability.id in abilities @property_immutable_cache def is_moving(self) -> bool:", "in TARGET_GROUND for weapon in self._weapons) return False @property_immutable_cache def", "bool: \"\"\" Checks if a worker is carrying a resource.", "warp in 'unit' at 'position'. :param unit: :param queue: \"\"\"", "{Target.Point.value, Target.PointOrUnit.value} and isinstance( target, (Point2, tuple) ): return (", "to mine. Only works for own units. \"\"\" return self.is_using_ability(IS_GATHERING)", "damage' against 'armor type'. 
Possible armor typs are: 'Light', 'Armored',", "return self._proto.energy @property def energy_max(self) -> Union[int, float]: \"\"\" Returns", "queue: \"\"\" return self(AbilityId.ATTACK, target=target, queue=queue) def gather(self, target: Unit,", "self._bot_object = bot_object # Used by property_immutable_cache self.cache = {}", "unit does 'bonus damage' against 'armor type'. Possible armor typs", "Returns cloak state. See https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95 \"\"\" return self._proto.cloak @property def", "isinstance(buff, BuffId), f\"{buff} is no BuffId\" return buff in self.buffs", "Orders a unit to stop moving. It will not move", "or self.type_id == UNIT_PHOTONCANNON and self.is_powered) @property def radar_range(self) ->", "\"\"\" Using the 2d distance between self and p. To", "self._proto.alliance == IS_ENEMY @property def owner_id(self) -> int: \"\"\" Returns", "@property_immutable_cache def can_attack(self) -> bool: \"\"\" Checks if the unit", "enemy units on higher, not visible ground appear this way.", "a unit is patrolling. Only works for own units. \"\"\"", "self(AbilityId.PATROL, target=position, queue=queue) def repair(self, repair_target: Unit, queue: bool =", "TARGET_AIR), None) if weapon: return (weapon.damage * weapon.attacks) / weapon.speed", "Unit, queue: bool = False) -> UnitCommand: \"\"\" Orders a", "if the unit is an SCV or MULE that is", "if weapon.damage_bonus: b = weapon.damage_bonus[0] return (b.bonus, Attribute(b.attribute).name) else: return", "= 0) -> bool: \"\"\" Checks if the target is", "calculating distance to target. :param target: :param bonus_distance: \"\"\" #", "NOTE: Returns 0 for units without a weapon. \"\"\" return", "can_attack_ground(self) -> bool: \"\"\" Checks if the unit can attack", "include shields. 
\"\"\" if self._proto.health_max == 0: return 0 return", "= next((weapon for weapon in self._weapons if weapon.type in TARGET_AIR),", "int: \"\"\" Returns the amount of remaining frames of the", "unit is idle. \"\"\" warnings.warn(\"noqueue will be removed soon, please", "\"\"\" return self(AbilityId.PATROL, target=position, queue=queue) def repair(self, repair_target: Unit, queue:", "of form '(bonus damage, armor type)' if unit does 'bonus", "for units without a weapon. \"\"\" return self._proto.attack_upgrade_level @property def", "is_mine(self) -> bool: \"\"\" Checks if the unit is controlled", "target: Unit, queue: bool = False) -> UnitCommand: \"\"\" Orders", "upgrade level of the units armor. \"\"\" return self._proto.armor_upgrade_level @property", "or other friendly units need the space. :param queue: \"\"\"", "the 2d position of the unit as tuple without conversion", "target position. Can be queued up to seven patrol points.", "return self._proto.ideal_harvesters @property def surplus_harvesters(self) -> int: \"\"\" Returns a", "IS_MASSIVE in self._type_data.attributes @property def is_psionic(self) -> bool: \"\"\" Checks", "the unit belongs to. \"\"\" return self._proto.alliance @property def is_mine(self)", "proto.target_unit_tag), proto.progress, ) def __init__(self, ability: AbilityData, target, progress: float", "Union[int, float]: \"\"\" Returns the range against ground units. Does", "if self._proto.shield_max == 0: return 0 return self._proto.shield / self._proto.shield_max", "cargo_used(self) -> Union[float, int]: \"\"\" Returns how much cargo space", "return self._type_data.unit_alias @property_immutable_cache def _weapons(self): \"\"\" Returns the weapons of", "redirects to 'AbilityId.ATTACK' \"\"\" return self(AbilityId.SCAN_MOVE, *args, **kwargs) def hold_position(self,", "everything on its way. 
:param target: :param queue: \"\"\" return", "== IS_SNAPSHOT @property def is_visible(self) -> bool: \"\"\" Checks if", "self.can_attack_air and (target.is_flying or target.type_id == UNIT_COLOSSUS): unit_attack_range = self.air_range", "the unit is currently training or researching. \"\"\" return self._proto.is_active", "@property_immutable_cache def air_dps(self) -> Union[int, float]: \"\"\" Returns the dps", "of the unit. Does not include upgrades or buffs. \"\"\"", "UnitCommand: \"\"\" Deprecated: This ability redirects to 'AbilityId.ATTACK' \"\"\" return", "moving. It will not move until it gets new orders.", "amount of minerals remaining in a mineral field. \"\"\" return", "units. \"\"\" return self.is_using_ability(IS_ATTACKING) @property_immutable_cache def is_patrolling(self) -> bool: \"\"\"", "False def __call__(self, ability, target=None, queue: bool = False): return", "percentage of health the unit has. Does not include shields.", "health of the unit. Does not include shields. \"\"\" return", "of current buffs the unit has. \"\"\" return {BuffId(buff_id) for", "-> UnitCommand: \"\"\" Orders unit to build another 'unit' at", "units. \"\"\" return self.type_id in transforming and self.is_using_ability(transforming[self.type_id]) @property_immutable_cache def", "the unit is revealed. \"\"\" return self._proto.cloak is IS_REVEALED @property", "return False def __call__(self, ability, target=None, queue: bool = False):", "self._proto.cargo_space_taken @property def has_cargo(self) -> bool: \"\"\" Checks if this", "the target without checking ability cooldown (like stalker blink) or", "return self._proto.shield / self._proto.shield_max @property def energy(self) -> Union[int, float]:", "range\" ability_target_type = self._bot_object._game_data.abilities[ability_id.value]._proto.target # For casting abilities that target", "def can_be_attacked(self) -> bool: \"\"\" Checks if the unit is", "revealed or not cloaked and therefore can be attacked. 
\"\"\"", "field or vespene geyser to deliver resources to townhall. Only", "available through research (like HT storm). :param ability_id: :param target:", "the unit is idle. \"\"\" warnings.warn(\"noqueue will be removed soon,", "\"\"\" Checks if the unit can attack at all. \"\"\"", "import UnitCommand warnings.simplefilter(\"once\") if TYPE_CHECKING: from .bot_ai import BotAI from", "def type_id(self) -> UnitTypeId: \"\"\" UnitTypeId found in sc2/ids/unit_typeid. Caches", "\"\"\" return self.is_using_ability(AbilityId.MOVE) @property_immutable_cache def is_attacking(self) -> bool: \"\"\" Checks", "Returns the percentage of amount of energy the unit has.", "'position'. Target can be a Unit (to follow that unit)", "speed of the unit. Does not include upgrades or buffs.", "0 @property_immutable_cache def air_range(self) -> Union[int, float]: \"\"\" Returns the", "@property def is_hallucination(self) -> bool: \"\"\" Returns True if the", "bool: \"\"\" Checks if unit is idle. \"\"\" return not", "\"\"\" return self._type_data.cargo_size @property def cargo_max(self) -> Union[float, int]: \"\"\"", "any(weapon.type in TARGET_BOTH for weapon in self._weapons) return False @property_immutable_cache", "\"\"\" Returns the armor of the unit. Does not include", "Usage: self.actions.append(COMMANDCENTER.train(SCV)) :param unit: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, queue=queue)", "for units without energy. \"\"\" if self._proto.energy_max == 0: return", "if the unit is currently training or researching. \"\"\" return", "has the 'mechanical' attribute. \"\"\" return IS_MECHANICAL in self._type_data.attributes @property", "carrying vespene gas. \"\"\" return not IS_CARRYING_VESPENE.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_resource(self)", "Checks if a unit is gathering or returning. 
Only works", "= self.air_range else: return False return ( self._bot_object._distance_squared_unit_to_unit(self, target) <=", "\"\"\" return self.type_id in transforming and self.is_using_ability(transforming[self.type_id]) @property_immutable_cache def is_repairing(self)", "Only works for own units. \"\"\" return self.is_using_ability(AbilityId.MOVE) @property_immutable_cache def", "UNIT_ORACLE: return 4 if self.type_id == UNIT_BATTLECRUISER: return 6 if", "in {Target.Unit.value, Target.PointOrUnit.value} and isinstance(target, Unit): return ( self._bot_object._distance_squared_unit_to_unit(self, target)", "researching. \"\"\" return self._proto.is_active # PROPERTIES BELOW THIS COMMENT ARE", "target=position) def attack(self, target: Union[Unit, Point2, Point3], queue: bool =", "return False @property def facing(self) -> Union[int, float]: \"\"\" Returns", "POPULATED FOR SNAPSHOTS @property def mineral_contents(self) -> int: \"\"\" Returns", "def distance_to(self, p: Union[Unit, Point2, Point3]) -> Union[int, float]: \"\"\"", "if the unit is on the screen. 
\"\"\" return self._proto.is_on_screen", "bonus_distance: Union[int, float] = 0) -> bool: \"\"\" Checks if", "Union[int, float]: \"\"\" Returns the percentage of amount of energy", "SNAPSHOTS @property def mineral_contents(self) -> int: \"\"\" Returns the amount", "UNIT_PHOTONCANNON and self.is_powered) @property def radar_range(self) -> Union[int, float]: return", "position)) :param unit: :param position: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id,", "\"\"\" return self._proto.is_powered @property def is_active(self) -> bool: \"\"\" Checks", "queue=queue) def patrol(self, position: Union[Point2, Point3], queue: bool = False)", "of war or attacking enemy units on higher, not visible", ".unit_command import UnitCommand warnings.simplefilter(\"once\") if TYPE_CHECKING: from .bot_ai import BotAI", "if weapon.type in TARGET_AIR), None) if weapon: return weapon.range return", "by a sensor tower. \"\"\" return self._proto.is_blip @property def is_powered(self)", "return self.is_using_ability(IS_REPAIRING) @property def add_on_tag(self) -> int: \"\"\" Returns the", "it is a Unit) or Point2 (if it is a", "unit! It does not give any information about the cloak", "UnitCommand: \"\"\" Orders unit to research 'upgrade'. Requires UpgradeId to", "Checks if unit has buff 'buff'. \"\"\" assert isinstance(buff, BuffId),", "return bool(self._proto.cargo_space_taken) @property def cargo_size(self) -> Union[float, int]: \"\"\" Returns", "False @property_immutable_cache def air_dps(self) -> Union[int, float]: \"\"\" Returns the", "visible for the bot. NOTE: This means the bot has", "IS_ENEMY @property def owner_id(self) -> int: \"\"\" Returns the owner", "of the unit! It does not give any information about", "attacks (Oracle, Baneling) or multiple attacks (Thor). 
if self._weapons: for", "def surplus_harvesters(self) -> int: \"\"\" Returns a positive int if", "elif unit.weapon_cooldown < 0: self.actions.append(unit.move(closest_allied_unit_because_cant_attack)) else: self.actions.append(unit.move(retreatPosition)) \"\"\" if self.can_attack:", "Provides the unit type data. \"\"\" return self._bot_object._game_data.units[self._proto.unit_type] @property def", "def is_mine(self) -> bool: \"\"\" Checks if the unit is", "in self._type_data.attributes @property def is_light(self) -> bool: \"\"\" Checks if", "units. \"\"\" return self.is_using_ability(IS_RETURNING) @property_immutable_cache def is_collecting(self) -> bool: \"\"\"", "False) -> UnitCommand: \"\"\" Orders unit to attack. Target can", "False @property_immutable_cache def ground_dps(self) -> Union[int, float]: \"\"\" Returns the", "if weapon.type in TARGET_AIR), None) if weapon: return (weapon.damage *", "has. Returns 0 for non-protoss units. \"\"\" if self._proto.shield_max ==", "if the unit is visible for the bot. NOTE: This", "does not give any information about the cloak status of", "has the 'psionic' attribute. \"\"\" return IS_PSIONIC in self._type_data.attributes @property", "of the position of the unit! It does not give", "@property def is_flying(self) -> bool: \"\"\" Checks if the unit", "\"\"\" Orders Warpgate to warp in 'unit' at 'position'. 
:param", "queue: bool = False) -> UnitCommand: \"\"\" Order an SCV", "for unit in self._proto.passengers} @property def cargo_used(self) -> Union[float, int]:", "return self(AbilityId.HOLDPOSITION, queue=queue) def stop(self, queue: bool = False) ->", "the units inside a Bunker, CommandCenter, PlanetaryFortress, Medivac, Nydus, Overlord", "def facing(self) -> Union[int, float]: \"\"\" Returns direction the unit", "\"\"\" return self._proto.armor_upgrade_level @property def shield_upgrade_level(self) -> int: \"\"\" Returns", "bool: \"\"\" Checks if the unit has the 'light' attribute.", "if the unit has the 'psionic' attribute. \"\"\" return IS_PSIONIC", "units, like transfuse, feedback, snipe, yamato if ability_target_type in {Target.Unit.value,", "level of the units shield. # NOTE: Returns 0 for", "return self._proto.radar_range @property def is_selected(self) -> bool: \"\"\" Checks if", "\"\"\" Returns the number of workers currently gathering resources at", "or self.has_buff(BuffId.GRAVITONBEAM) @property def is_burrowed(self) -> bool: \"\"\" Checks if", "fog of war or attacking enemy units on higher, not", "a Bunker, CommandCenter, PlanetaryFortress, Medivac, Nydus, Overlord or WarpPrism. \"\"\"", "of the unit. \"\"\" try: return self._type_data._proto.weapons except: return None", "if self.type_id == UNIT_BATTLECRUISER: return 6 if self.can_attack_air: weapon =", "is facing another unit def is_facing_unit(self, other_unit: Unit, angle_error: float", "can't build extractors on empty geysers. 
\"\"\" return bool(self._proto.vespene_contents) @property", "@property def surplus_harvesters(self) -> int: \"\"\" Returns a positive int", "return None @property def armor(self) -> Union[int, float]: \"\"\" Returns", "def health(self) -> Union[int, float]: \"\"\" Returns the health of", "in self._proto.buff_ids} @property_immutable_cache def is_carrying_minerals(self) -> bool: \"\"\" Checks if", "self._weapons) return False @property_immutable_cache def can_attack_ground(self) -> bool: \"\"\" Checks", "unit is idle. \"\"\" return not self._proto.orders def is_using_ability(self, abilities:", "int: # TODO What does this do? return self._proto.engaged_target_tag #", "Returns the ideal harverster count for unit. 3 for gas", "enemy unit is in range or other friendly units need", "for own units. \"\"\" return self.is_using_ability(IS_COLLECTING) @property_immutable_cache def is_constructing_scv(self) ->", "def __call__(self, ability, target=None, queue: bool = False): return UnitCommand(ability,", "list of the current orders. \"\"\" return [UnitOrder.from_proto(order, self._bot_object) for", "unit will patrol in a circle. :param position: :param queue:", "None \"\"\" return self._type_data.tech_alias @property def unit_alias(self) -> Optional[UnitTypeId]: \"\"\"", "== UNIT_BATTLECRUISER: return True if self._weapons: return any(weapon.type in TARGET_BOTH", "self._bot_object) for order in self._proto.orders] @property_immutable_cache def order_target(self) -> Optional[Union[int,", "return self._proto.display_type == IS_SNAPSHOT @property def is_visible(self) -> bool: \"\"\"", "self._proto.mineral_contents @property def vespene_contents(self) -> int: \"\"\" Returns the amount", "cargo_max(self) -> Union[float, int]: \"\"\" How much cargo space is", "'Target' must be a mineral patch or a gas extraction", "has buff 'buff'. \"\"\" assert isinstance(buff, BuffId), f\"{buff} is no", "bot. 
\"\"\" return self._proto.alliance == IS_MINE @property def is_enemy(self) ->", "SCV or MULE that is currently repairing. Only works for", "has_cargo(self) -> bool: \"\"\" Checks if this unit has any", ":param upgrade: :param queue: \"\"\" return self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id, queue=queue) def warp_in(self,", "Returns the dps against ground units. Does not include upgrades.", "return self(warpgate_abilities[normal_creation_ability], target=position) def attack(self, target: Union[Unit, Point2, Point3], queue:", "not include upgrades or buffs. \"\"\" return self._type_data._proto.movement_speed @property def", ":param ability_id: :param target: :param bonus_distance: \"\"\" cast_range = self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range", "unit to move to 'position'. Target can be a Unit", "buildings, 2*n for n mineral patches on that base.\"\"\" return", "attribute. \"\"\" return IS_PSIONIC in self._type_data.attributes @property def tech_alias(self) ->", "return self(AbilityId.MOVE_MOVE, target=position, queue=queue) def scan_move(self, *args, **kwargs) -> UnitCommand:", "the same unit type. \"\"\" unit_type = self._proto.unit_type if unit_type", "in {UNIT_BATTLECRUISER, UNIT_ORACLE} @property_immutable_cache def can_attack_both(self) -> bool: \"\"\" Checks", "def armor(self) -> Union[int, float]: \"\"\" Returns the armor of", "completed in order to detect and Photoncannons also need to", "= self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range assert cast_range > 0, f\"Checking for an ability", "buff in self.buffs def train(self, unit: UnitTypeId, queue: bool =", "return self._type_data._proto.movement_speed @property def is_mineral_field(self) -> bool: \"\"\" Checks if", "-> int: \"\"\" Returns the amount of remaining frames of", "the unit is attacking. Only works for own units. 
\"\"\"", "def is_transforming(self) -> bool: \"\"\" Checks if the unit transforming.", "\"\"\" Checks if the unit is a structure. \"\"\" return", "Returns the unique tag of the unit. \"\"\" return self._proto.tag", "int: \"\"\" Returns the amount of gas remaining in a", "unit is a mineral field. \"\"\" return self._type_data.has_minerals @property def", "None, queue: bool = False) -> UnitCommand: \"\"\" Orders the", "much cargo space is currently left in the unit. \"\"\"", "self._proto.cloak @property def is_cloaked(self) -> bool: \"\"\" Checks if the", "that can't attack. Usage: if unit.weapon_cooldown == 0: self.actions.append(unit.attack(target)) elif", "return self._proto.facing # TODO: a function that checks if this", "the unit can air attack at all. Does not include", "own units. \"\"\" return self.type_id in transforming and self.is_using_ability(transforming[self.type_id]) @property_immutable_cache", "self._proto.ideal_harvesters @property_immutable_cache def weapon_cooldown(self) -> Union[int, float]: \"\"\" Returns the", "shields. 
\"\"\" if self._proto.health_max == 0: return 0 return self._proto.health", "self(AbilityId.EFFECT_REPAIR, target=repair_target, queue=queue) def __hash__(self): return self.tag def __eq__(self, other):", "the starting point, the unit will patrol in a circle.", "weapon: return weapon.range return 0 @property_immutable_cache def can_attack_air(self) -> bool:", "is_psionic(self) -> bool: \"\"\" Checks if the unit has the", "in self._type_data.attributes @property def is_mechanical(self) -> bool: \"\"\" Checks if", "[0,1].\"\"\" return self._proto.build_progress @property def is_ready(self) -> bool: \"\"\" Checks", "return {BuffId(buff_id) for buff_id in self._proto.buff_ids} @property_immutable_cache def is_carrying_minerals(self) ->", ":param unit: :param queue: \"\"\" normal_creation_ability = self._bot_object._game_data.units[unit.value].creation_ability.id return self(warpgate_abilities[normal_creation_ability],", "self._bot_object._distance_squared_unit_to_unit(self, target) <= (self.radius + target.radius + unit_attack_range + bonus_distance)", "bot_object: \"\"\" self._proto = proto_data self._bot_object = bot_object # Used", "This is a value of 1 or 2 in a", "return f\"Unit(name={self.name !r}, tag={self.tag})\" @property_immutable_cache def type_id(self) -> UnitTypeId: \"\"\"", "health(self) -> Union[int, float]: \"\"\" Returns the health of the", "self.orders: return False if isinstance(abilities, AbilityId): abilities = {abilities} return", "int: \"\"\" Returns the amount of minerals remaining in a", "def can_attack(self) -> bool: \"\"\" Checks if the unit can", "own units. \"\"\" return self.is_using_ability(IS_RETURNING) @property_immutable_cache def is_collecting(self) -> bool:", "bonus_distance: \"\"\" cast_range = self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range assert cast_range > 0, f\"Checking", "up to seven patrol points. If the last point is", "Optional[List[UnitTypeId]]: \"\"\" Building tech equality, e.g. 
OrbitalCommand is the same", "bot_object._game_data.abilities[proto.ability_id], (proto.target_world_space_pos if proto.HasField(\"target_world_space_pos\") else proto.target_unit_tag), proto.progress, ) def __init__(self,", "class Unit: def __init__(self, proto_data, bot_object: BotAI): \"\"\" :param proto_data:", "idle. \"\"\" warnings.warn(\"noqueue will be removed soon, please use is_idle", "\"\"\" If unit is addon (techlab or reactor), returns the", "Returns the amount of energy the unit has. Returns 0", "the unit has. Returns 0 for non-protoss units. \"\"\" if", "0: return 0 return self._proto.shield / self._proto.shield_max @property def energy(self)", "an ability ({ability_id}) that has no cast range\" ability_target_type =", "does this do? return self._proto.engaged_target_tag # Unit functions def has_buff(self,", "as a float in range [0,2π). 0 is in direction", "'upgrade'. Requires UpgradeId to be passed instead of AbilityId. :param", ".game_data import AbilityData class UnitOrder: @classmethod def from_proto(cls, proto, bot_object:", "weapon in self._weapons) return False @property_immutable_cache def air_dps(self) -> Union[int,", "need a 'target'. :param target: :param queue: \"\"\" return self(AbilityId.HARVEST_RETURN,", "return f\"UnitOrder({self.ability}, {self.target}, {self.progress})\" class Unit: def __init__(self, proto_data, bot_object:", "float]: \"\"\" Returns the maximum health of the unit. Does", ":param queue: \"\"\" return self(AbilityId.HARVEST_RETURN, target=target, queue=queue) def move(self, position:", "-> str: return f\"UnitOrder({self.ability}, {self.target}, {self.progress})\" class Unit: def __init__(self,", "include shields. 
\"\"\" return self._proto.health_max @property def health_percentage(self) -> Union[int,", "buff_id in self._proto.buff_ids} @property_immutable_cache def is_carrying_minerals(self) -> bool: \"\"\" Checks", "bool = False) -> UnitCommand: \"\"\" Order an SCV or", "float]: \"\"\" Returns the detection distance of the unit. \"\"\"", "\"\"\" return self.is_using_ability(IS_PATROLLING) @property_immutable_cache def is_gathering(self) -> bool: \"\"\" Checks", "== IS_ENEMY @property def owner_id(self) -> int: \"\"\" Returns the", "\"\"\" return self(AbilityId.HOLDPOSITION, queue=queue) def stop(self, queue: bool = False)", "weapon.range return 0 @property_immutable_cache def bonus_damage(self): \"\"\" Returns a tuple", ":param target: :param progress: \"\"\" self.ability = ability self.target =", "\"\"\" return self._type_data.unit_alias @property_immutable_cache def _weapons(self): \"\"\" Returns the weapons", "base.\"\"\" return self._proto.assigned_harvesters @property def ideal_harvesters(self) -> int: \"\"\" Returns", "return self._bot_object._game_data.units[self._proto.unit_type] @property def name(self) -> str: \"\"\" Returns the", ".ids.unit_typeid import UnitTypeId from .position import Point2, Point3 from .unit_command", "field or vespene geyser to mine. Only works for own", "for units without a timer bar. \"\"\" return self._proto.buff_duration_max #", "<= cast_range + self.radius + bonus_distance ) return False @property", "for buff_id in self._proto.buff_ids} @property_immutable_cache def is_carrying_minerals(self) -> bool: \"\"\"", "form '(bonus damage, armor type)' if unit does 'bonus damage'", "as tuple without conversion to Point2. \"\"\" return self._proto.pos.x, self._proto.pos.y", "if a worker is carrying a resource. \"\"\" return not", "radar_range(self) -> Union[int, float]: return self._proto.radar_range @property def is_selected(self) ->", "has. Does not include shields. 
\"\"\" if self._proto.health_max == 0:", "unit is able to cast an ability on the target", "\"\"\" if isinstance(p, Unit): return self._bot_object._distance_squared_unit_to_unit(self, p) ** 0.5 return", "target's radius when calculating distance to target. :param target: :param", "\"\"\" Checks if a geyser has any gas remaining. You", "to a mineral field or vespene geyser to mine. Only", "def is_repairing(self) -> bool: \"\"\" Checks if the unit is", "Point2. Attacking a position will make the unit move there", "-> UnitCommand: \"\"\" Orders the unit to return resource. Does", "\"\"\" if self.orders: if isinstance(self.orders[0].target, int): return self.orders[0].target else: return", "self._proto = proto_data self._bot_object = bot_object # Used by property_immutable_cache", "Checks if the unit is idle. \"\"\" warnings.warn(\"noqueue will be", "target_in_range(self, target: Unit, bonus_distance: Union[int, float] = 0) -> bool:", "sensor tower. \"\"\" return self._proto.is_blip @property def is_powered(self) -> bool:", "-> UnitCommand: \"\"\" Orders the unit to move to 'position'.", "not include upgrades. \"\"\" if self.can_attack_air: weapon = next((weapon for", "\"\"\" Building type equality, e.g. FlyingOrbitalCommand is the same as", "repair(self, repair_target: Unit, queue: bool = False) -> UnitCommand: \"\"\"", "self._bot_object._game_data.units[self._proto.unit_type] @property def name(self) -> str: \"\"\" Returns the name", "Union[AbilityId, Set[AbilityId]]) -> bool: \"\"\" Check if the unit is", "self.radius + target.radius + bonus_distance) ** 2 ) # For", "currently training or researching. \"\"\" return self._proto.is_active # PROPERTIES BELOW", "the amount of cargo space the unit needs. 
\"\"\" return", "make the unit move there and attack everything on its", "queue: \"\"\" return self(AbilityId.HOLDPOSITION, queue=queue) def stop(self, queue: bool =", "def is_enemy(self) -> bool: \"\"\" Checks if the unit is", "def attack_upgrade_level(self) -> int: \"\"\" Returns the upgrade level of", "self.is_using_ability(IS_COLLECTING) @property_immutable_cache def is_constructing_scv(self) -> bool: \"\"\" Checks if the", "\"\"\" if self.can_attack_air: weapon = next((weapon for weapon in self._weapons", "bool: \"\"\" Checks if the unit is a detector. Has", "weapon_cooldown(self) -> Union[int, float]: \"\"\" Returns the time until the", "from __future__ import annotations import warnings from typing import Any,", "UnitTypeId found in sc2/ids/unit_typeid. Caches all type_ids of the same", "\"\"\" return self._proto.buff_duration_remain @property def buff_duration_max(self) -> int: \"\"\" Returns", "is_structure(self) -> bool: \"\"\" Checks if the unit is a", "@property_immutable_cache def is_carrying_minerals(self) -> bool: \"\"\" Checks if a worker", "from .ids.unit_typeid import UnitTypeId from .position import Point2, Point3 from", "\"\"\" Returns the maximum shield points the unit can have.", "upgrades. \"\"\" if self.type_id == UNIT_BATTLECRUISER: return True if self._weapons:", "multiple attacks (Thor). if self._weapons: for weapon in self._weapons: if", "BuffId from .ids.upgrade_id import UpgradeId from .ids.unit_typeid import UnitTypeId from", "Bunker, CommandCenter, PlanetaryFortress, Medivac, Nydus, Overlord or WarpPrism. 
\"\"\" return", "{UNIT_BATTLECRUISER, UNIT_ORACLE}: return True if self._weapons: return any(weapon.type in TARGET_GROUND", "IS_CLOAKED, IS_REVEALED, CAN_BE_ATTACKED, IS_CARRYING_MINERALS, IS_CARRYING_VESPENE, IS_CARRYING_RESOURCES, IS_ATTACKING, IS_PATROLLING, IS_GATHERING, IS_RETURNING,", "return self._type_data._proto.weapons except: return None @property_immutable_cache def can_attack(self) -> bool:", "\"\"\" Checks if the unit has the 'psionic' attribute. \"\"\"", "not IS_CARRYING_RESOURCES.isdisjoint(self.buffs) @property def detect_range(self) -> Union[int, float]: \"\"\" Returns", "passed instead of AbilityId. :param upgrade: :param queue: \"\"\" return", "transforming and self.is_using_ability(transforming[self.type_id]) @property_immutable_cache def is_repairing(self) -> bool: \"\"\" Checks", "\"\"\" return self.is_using_ability(IS_GATHERING) @property_immutable_cache def is_returning(self) -> bool: \"\"\" Checks", "self._weapons: for weapon in self._weapons: if weapon.damage_bonus: b = weapon.damage_bonus[0]", "bool: \"\"\" Checks if the unit is hostile. \"\"\" return", "\"\"\" Returns the owner of the unit. This is a", ":param target: :param bonus_distance: \"\"\" # TODO: Fix this because", "-> bool: \"\"\" Checks if the unit has the 'mechanical'", "this do? return self._proto.engaged_target_tag # Unit functions def has_buff(self, buff:", "the sight range of the unit. \"\"\" return self._type_data._proto.sight_range @property", "if the unit can air attack at all. Does not", "status of the unit.\"\"\" return self._proto.display_type == IS_VISIBLE @property def", "from .constants import ( transforming, IS_STRUCTURE, IS_LIGHT, IS_ARMORED, IS_BIOLOGICAL, IS_MECHANICAL,", "detected by a sensor tower. 
\"\"\" return self._proto.is_blip @property def", "armor_upgrade_level(self) -> int: \"\"\" Returns the upgrade level of the", "in transforming and self.is_using_ability(transforming[self.type_id]) @property_immutable_cache def is_repairing(self) -> bool: \"\"\"", "Overlord or WarpPrism. \"\"\" return {unit.tag for unit in self._proto.passengers}", "TARGET_GROUND), None) if weapon: return weapon.range return 0 @property_immutable_cache def", "Checks if the unit can air attack at all. Does", "return self._proto.health_max @property def health_percentage(self) -> Union[int, float]: \"\"\" Returns", "by property_immutable_cache self.cache = {} def __repr__(self) -> str: \"\"\"", "bool: \"\"\" Checks if the unit can attack at all.", "selected. \"\"\" return self._proto.is_selected @property def is_on_screen(self) -> bool: \"\"\"", "returns None \"\"\" return self._type_data.unit_alias @property_immutable_cache def _weapons(self): \"\"\" Returns", "detected. \"\"\" return self._proto.is_hallucination @property def attack_upgrade_level(self) -> int: \"\"\"", "data. \"\"\" return self._bot_object._game_data.units[self._proto.unit_type] @property def name(self) -> str: \"\"\"", "float = 1e-3) -> bool: \"\"\" Function not completed yet", "space the unit needs. \"\"\" return self._type_data.cargo_size @property def cargo_max(self)", "without energy. \"\"\" return self._proto.energy_max @property def energy_percentage(self) -> Union[int,", "self._proto.health_max @property def health_percentage(self) -> Union[int, float]: \"\"\" Returns the", "= UnitTypeId(unit_type) return self._bot_object._game_data.unit_types[unit_type] @property_immutable_cache def _type_data(self) -> \"UnitTypeData\": \"\"\"", "a structure. 
\"\"\" return IS_STRUCTURE in self._type_data.attributes @property def is_light(self)", "** 2 ) def in_ability_cast_range( self, ability_id: AbilityId, target: Union[Unit,", "queue: \"\"\" return self(AbilityId.MOVE_MOVE, target=position, queue=queue) def scan_move(self, *args, **kwargs)", "to move on its own if it is attacked, enemy", "-> bool: \"\"\" Checks if the unit is flying. \"\"\"", "order, returns None if the unit is idle \"\"\" if", "name(self) -> str: \"\"\" Returns the name of the unit.", "in TARGET_AIR), None) if weapon: return weapon.range return 0 @property_immutable_cache", "Returns how much cargo space is currently used in the", "is_vespene_geyser(self) -> bool: \"\"\" Checks if the unit is a", "= self._bot_object._game_data.units[unit.value].creation_ability.id return self(warpgate_abilities[normal_creation_ability], target=position) def attack(self, target: Union[Unit, Point2,", "unit type data. \"\"\" return self._bot_object._game_data.units[self._proto.unit_type] @property def name(self) ->", "buff_duration_max(self) -> int: \"\"\" Returns the maximum amount of frames", "ground and air units. \"\"\" if self.type_id == UNIT_BATTLECRUISER: return", "self._proto.cloak is IS_REVEALED @property def can_be_attacked(self) -> bool: \"\"\" Checks", "vespene geyser or gas extraction building. \"\"\" return self._type_data.has_vespene @property", "if weapon: return weapon.range return 0 @property_immutable_cache def can_attack_air(self) ->", "many harvesters mining, a negative int if it has too", "@property def shield(self) -> Union[int, float]: \"\"\" Returns the shield", "that checks if this unit is facing another unit def", "if the unit is a non-empty vespene geyser or gas", "0 for units without energy. \"\"\" if self._proto.energy_max == 0:", "the unit is burrowed. \"\"\" return self._proto.is_burrowed @property def is_hallucination(self)", "@property def cargo_max(self) -> Union[float, int]: \"\"\" How much cargo", "or 2 in a two player game. 
\"\"\" return self._proto.owner", "ideal harverster count for unit. 3 for gas buildings, 2*n", "units without a shield. \"\"\" return self._proto.shield_upgrade_level @property def buff_duration_remain(self)", "Unit): return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (cast_range + self.radius +", "-> Set[Unit]: \"\"\" Returns the units inside a Bunker, CommandCenter,", "weapon: return (weapon.damage * weapon.attacks) / weapon.speed return 0 @property_immutable_cache", "if isinstance(self.orders[0].target, int): return self.orders[0].target else: return Point2.from_proto(self.orders[0].target) return None", "warp_in(self, unit: UnitTypeId, position: Union[Point2, Point3]) -> UnitCommand: \"\"\" Orders", "that unit) or Point2. :param position: :param queue: \"\"\" return", "scan_move(self, *args, **kwargs) -> UnitCommand: \"\"\" Deprecated: This ability redirects", "a unit to gather minerals or gas. 'Target' must be", "AbilityId. :param upgrade: :param queue: \"\"\" return self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id, queue=queue) def", "@property def is_light(self) -> bool: \"\"\" Checks if the unit", "Returns the maximum health of the unit. Does not include", "queue: bool = False) -> UnitCommand: \"\"\" Orders unit to", "Race: \"\"\" Returns the race of the unit \"\"\" return", "self._proto.orders def is_using_ability(self, abilities: Union[AbilityId, Set[AbilityId]]) -> bool: \"\"\" Check", "None) if weapon: return (weapon.damage * weapon.attacks) / weapon.speed return", "-> Union[int, float]: \"\"\" Returns the percentage of health the", "def is_patrolling(self) -> bool: \"\"\" Checks if a unit is", "Union[int, float]: \"\"\" Returns the amount of energy the unit", "-> bool: \"\"\" Checks if the unit is visible for", "return self._proto.cargo_space_taken @property def has_cargo(self) -> bool: \"\"\" Checks if", "new orders. 
:param queue: \"\"\" return self(AbilityId.HOLDPOSITION, queue=queue) def stop(self,", "UnitCommand: \"\"\" Orders unit to train another 'unit'. Usage: self.actions.append(COMMANDCENTER.train(SCV))", "for own units. \"\"\" return self.is_using_ability(IS_ATTACKING) @property_immutable_cache def is_patrolling(self) ->", "to target. :param target: :param bonus_distance: \"\"\" # TODO: Fix", "float]: \"\"\" Returns the sight range of the unit. \"\"\"", "Unit (to follow that unit) or Point2. :param position: :param", "to move to 'position'. Target can be a Unit (to", "from .data import Alliance, Attribute, CloakState, DisplayType, Race, TargetType, warpgate_abilities,", "Point3]) -> Union[int, float]: \"\"\" Using the 2d distance between", "IS_GATHERING, IS_RETURNING, IS_COLLECTING, IS_CONSTRUCTING_SCV, IS_REPAIRING, IS_DETECTOR, UNIT_PHOTONCANNON, UNIT_COLOSSUS, ) from", "of minerals remaining in a mineral field. \"\"\" return self._proto.mineral_contents", "-> bool: \"\"\" Checks if unit is idle. \"\"\" return", "any units loaded. \"\"\" return bool(self._proto.cargo_space_taken) @property def cargo_size(self) ->", "\"\"\" return self._bot_object._game_data.units[self._proto.unit_type] @property def name(self) -> str: \"\"\" Returns", "self(warpgate_abilities[normal_creation_ability], target=position) def attack(self, target: Union[Unit, Point2, Point3], queue: bool", "It will not move until it gets new orders. :param", "the 'massive' attribute. \"\"\" return IS_MASSIVE in self._type_data.attributes @property def", "bot has vision of the position of the unit! 
It", "return any(weapon.type in TARGET_BOTH for weapon in self._weapons) return False", "is_idle instead\", DeprecationWarning, stacklevel=2) return self.is_idle @property def is_idle(self) ->", "return False @property_immutable_cache def ground_dps(self) -> Union[int, float]: \"\"\" Returns", "position: :param queue: \"\"\" return self(AbilityId.PATROL, target=position, queue=queue) def repair(self,", "self._weapons if weapon.type in TARGET_AIR), None) if weapon: return (weapon.damage", "0 return self._proto.shield / self._proto.shield_max @property def energy(self) -> Union[int,", "and the target position. Can be queued up to seven", "this because immovable units (sieged tank, planetary fortress etc.) have", "used in the unit. Note that some units take up", "Only works for own units. \"\"\" return self.is_using_ability(IS_CONSTRUCTING_SCV) @property_immutable_cache def", "not in self._bot_object._game_data.unit_types: self._bot_object._game_data.unit_types[unit_type] = UnitTypeId(unit_type) return self._bot_object._game_data.unit_types[unit_type] @property_immutable_cache def", "queue: bool = False) -> UnitCommand: \"\"\" Orders a unit", "CAN_BE_ATTACKED, IS_CARRYING_MINERALS, IS_CARRYING_VESPENE, IS_CARRYING_RESOURCES, IS_ATTACKING, IS_PATROLLING, IS_GATHERING, IS_RETURNING, IS_COLLECTING, IS_CONSTRUCTING_SCV,", "much cargo space is available at maximum. \"\"\" return self._proto.cargo_space_max", "mineral field or vespene geyser to deliver resources to townhall.", "bool: \"\"\" Checks if the unit transforming. Only works for", "proto_data self._bot_object = bot_object # Used by property_immutable_cache self.cache =", "\"\"\" Checks if the unit has the 'massive' attribute. 
\"\"\"", "__repr__(self) -> str: \"\"\" Returns string of this form: Unit(name='SCV',", "Target.PointOrUnit.value} and isinstance(target, Unit): return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (cast_range", "can_be_attacked(self) -> bool: \"\"\" Checks if the unit is revealed", "maximum. \"\"\" return self._proto.cargo_space_max @property def cargo_left(self) -> Union[float, int]:", "self.actions.append(unit.move(closest_allied_unit_because_cant_attack)) else: self.actions.append(unit.move(retreatPosition)) \"\"\" if self.can_attack: return self._proto.weapon_cooldown return -1", "shield points the unit has. Returns 0 for non-protoss units.", "harverster count for unit. 3 for gas buildings, 2*n for", "is on its way to a mineral field or vespene", "the unit is controlled by the bot. \"\"\" return self._proto.alliance", "return self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id, queue=queue) def warp_in(self, unit: UnitTypeId, position: Union[Point2, Point3])", "\"\"\" Checks if unit has buff 'buff'. \"\"\" assert isinstance(buff,", "\"\"\" return self._proto.cargo_space_max - self._proto.cargo_space_taken @property def assigned_harvesters(self) -> int:", "self.actions.append(unit.attack(target)) elif unit.weapon_cooldown < 0: self.actions.append(unit.move(closest_allied_unit_because_cant_attack)) else: self.actions.append(unit.move(retreatPosition)) \"\"\" if", "Union[int, float]: return self._proto.radar_range @property def is_selected(self) -> bool: \"\"\"", "addon attached. \"\"\" return bool(self._proto.add_on_tag) @property_immutable_cache def add_on_land_position(self) -> Point2:", "both ground and air units. \"\"\" if self.type_id == UNIT_BATTLECRUISER:", "\"\"\" Orders a unit to gather minerals or gas. 
'Target'", "can start to move on its own if it is", "(techlab or reactor), returns the position where a terran building", "return self._proto.is_flying or self.has_buff(BuffId.GRAVITONBEAM) @property def is_burrowed(self) -> bool: \"\"\"", "-> int: \"\"\" Returns the upgrade level of the units", "war or attacking enemy units on higher, not visible ground", ".position import Point2, Point3 from .unit_command import UnitCommand warnings.simplefilter(\"once\") if", "Point2, Point3]) -> Union[int, float]: \"\"\" Using the 2d distance", ") # For casting abilities on the ground, like queen", "self._type_data.has_vespene @property def health(self) -> Union[int, float]: \"\"\" Returns the", "assert cast_range > 0, f\"Checking for an ability ({ability_id}) that", "queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, target=position, queue=queue) def research(self, upgrade: UpgradeId,", "self._proto.unit_type if unit_type not in self._bot_object._game_data.unit_types: self._bot_object._game_data.unit_types[unit_type] = UnitTypeId(unit_type) return", "self._proto.cloak in CAN_BE_ATTACKED @property_immutable_cache def buffs(self) -> Set: \"\"\" Returns", "carrying a resource. \"\"\" return not IS_CARRYING_RESOURCES.isdisjoint(self.buffs) @property def detect_range(self)", "unit is addon (techlab or reactor), returns the position where", "unit to train another 'unit'. Usage: self.actions.append(COMMANDCENTER.train(SCV)) :param unit: :param", "-> bool: \"\"\" Checks if a unit is patrolling. Only", "to warp in 'unit' at 'position'. :param unit: :param queue:", "/ weapon.speed return 0 @property_immutable_cache def ground_range(self) -> Union[int, float]:", "def _weapons(self): \"\"\" Returns the weapons of the unit. 
\"\"\"", "def air_dps(self) -> Union[int, float]: \"\"\" Returns the dps against", "\"\"\" return self._proto.display_type == IS_SNAPSHOT @property def is_visible(self) -> bool:", "\"\"\" Returns the ideal harverster count for unit. 3 for", "the 2d position of the unit. \"\"\" return Point2.from_proto(self._proto.pos) @property_immutable_cache", "self.target = target self.progress = progress def __repr__(self) -> str:", "tag=4396941328). \"\"\" return f\"Unit(name={self.name !r}, tag={self.tag})\" @property_immutable_cache def type_id(self) ->", "in 'unit' at 'position'. :param unit: :param queue: \"\"\" normal_creation_ability", "attack both ground and air units. \"\"\" if self.type_id ==", "weapon.range return 0 @property_immutable_cache def can_attack_air(self) -> bool: \"\"\" Checks", "Requires UpgradeId to be passed instead of AbilityId. :param upgrade:", "bool: \"\"\" Checks if the unit is a structure. \"\"\"", "the unit. \"\"\" return self._proto.tag @property def is_structure(self) -> bool:", "\"\"\" :param proto_data: :param bot_object: \"\"\" self._proto = proto_data self._bot_object", "FlyingOrbitalCommand is the same as OrbitalCommand For flying OrbitalCommand, this", "str: return f\"UnitOrder({self.ability}, {self.target}, {self.progress})\" class Unit: def __init__(self, proto_data,", "str: \"\"\" Returns the name of the unit. 
\"\"\" return", "if the unit is using one of the given abilities.", "addon \"\"\" return self.position.offset(Point2((-2.5, 0.5))) @property_mutable_cache def passengers(self) -> Set[Unit]:", "orders(self) -> List[UnitOrder]: \"\"\" Returns the a list of the", "Checks if the unit is an SCV or MULE that", "** 2 ) # For casting abilities on the ground,", "is revealed or not cloaked and therefore can be attacked.", "-> bool: \"\"\" Check if the unit is using one", "unit.weapon_cooldown < 0: self.actions.append(unit.move(closest_allied_unit_because_cant_attack)) else: self.actions.append(unit.move(retreatPosition)) \"\"\" if self.can_attack: return", "@property def build_progress(self) -> Union[int, float]: \"\"\" Returns completion in", "Union[int, float]: \"\"\" Using the 2d distance between self and", "float]: \"\"\" Returns the maximum amount of energy the unit", "-> bool: \"\"\" Checks if the unit is moving. Only", "building. :param target: :param queue: \"\"\" return self(AbilityId.HARVEST_GATHER, target=target, queue=queue)", "to repair. :param repair_target: :param queue: \"\"\" return self(AbilityId.EFFECT_REPAIR, target=repair_target,", "CommandCenter, PlanetaryFortress, Medivac, Nydus, Overlord or WarpPrism. \"\"\" return {Unit(unit,", "unit. \"\"\" return self._type_data._proto.sight_range @property def movement_speed(self) -> Union[int, float]:", "Usage: if unit.weapon_cooldown == 0: self.actions.append(unit.attack(target)) elif unit.weapon_cooldown < 0:", "self._weapons: return any(weapon.type in TARGET_AIR for weapon in self._weapons) return", "not give any information about the cloak status of the", "distance_to(self, p: Union[Unit, Point2, Point3]) -> Union[int, float]: \"\"\" Using", "(like HT storm). 
:param ability_id: :param target: :param bonus_distance: \"\"\"", "attack_upgrade_level(self) -> int: \"\"\" Returns the upgrade level of the", "TARGET_BOTH for weapon in self._weapons) return False @property_immutable_cache def can_attack_ground(self)", "need the space. :param queue: \"\"\" return self(AbilityId.STOP, queue=queue) def", "return self._proto.is_powered @property def is_active(self) -> bool: \"\"\" Checks if", "<= (self.radius + target.radius + unit_attack_range + bonus_distance) ** 2", "unit is flying. \"\"\" return self._proto.is_flying or self.has_buff(BuffId.GRAVITONBEAM) @property def", "the unit. Does not include shields. \"\"\" return self._proto.health @property", "return Race(self._type_data._proto.race) @property def tag(self) -> int: \"\"\" Returns the", "return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (cast_range + self.radius + target.radius", "self._proto.facing # TODO: a function that checks if this unit", "\"\"\" Returns the dps against ground units. Does not include", "-> Union[int, float]: \"\"\" Returns the range against ground units.", "a worker is carrying a resource. \"\"\" return not IS_CARRYING_RESOURCES.isdisjoint(self.buffs)", "a gas extraction building. :param target: :param queue: \"\"\" return", "@property def is_massive(self) -> bool: \"\"\" Checks if the unit", "all type_ids of the same unit type. \"\"\" unit_type =", "\"\"\" Orders a unit to stop moving. It will not", "has_vespene(self) -> bool: \"\"\" Checks if a geyser has any", "\"\"\" Checks if the unit is a detector. Has to", "@property def shield_upgrade_level(self) -> int: \"\"\" Returns the upgrade level", "of amount of energy the unit has. Returns 0 for", "<filename>sc2/unit.py from __future__ import annotations import warnings from typing import", "ground_dps(self) -> Union[int, float]: \"\"\" Returns the dps against ground", "for own units. 
\"\"\" return self.type_id in transforming and self.is_using_ability(transforming[self.type_id])", "self.is_powered) @property def radar_range(self) -> Union[int, float]: return self._proto.radar_range @property", "the bot. \"\"\" return self._proto.alliance == IS_MINE @property def is_enemy(self)", "def is_collecting(self) -> bool: \"\"\" Checks if a unit is", "Checks if the unit is revealed. \"\"\" return self._proto.cloak is", "TargetType, Target from .ids.ability_id import AbilityId from .ids.buff_id import BuffId", "unit \"\"\" return Race(self._type_data._proto.race) @property def tag(self) -> int: \"\"\"", "is carrying (gold-)minerals. \"\"\" return not IS_CARRYING_MINERALS.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_vespene(self)", "= False) -> UnitCommand: \"\"\" Orders the unit to move", "\"\"\" return self._proto.health @property def health_max(self) -> Union[int, float]: \"\"\"", "\"\"\" Checks if a unit is returning from mineral field", "too few mining.\"\"\" return self._proto.assigned_harvesters - self._proto.ideal_harvesters @property_immutable_cache def weapon_cooldown(self)", "__init__(self, proto_data, bot_object: BotAI): \"\"\" :param proto_data: :param bot_object: \"\"\"", "any(weapon.type in TARGET_AIR for weapon in self._weapons) return False @property_immutable_cache", "def health_percentage(self) -> Union[int, float]: \"\"\" Returns the percentage of", "0, f\"Checking for an ability ({ability_id}) that has no cast", "def can_attack_ground(self) -> bool: \"\"\" Checks if the unit can", "in abilities @property_immutable_cache def is_moving(self) -> bool: \"\"\" Checks if", "type. \"\"\" unit_type = self._proto.unit_type if unit_type not in self._bot_object._game_data.unit_types:", "float]: \"\"\" Returns the percentage of amount of energy the", "is visible for the bot. NOTE: This means the bot", "attacks (Thor). if self._weapons: for weapon in self._weapons: if weapon.damage_bonus:", "snapshot for the bot. 
Enemy buildings that have been scouted", "-> Union[int, float]: \"\"\" Returns the percentage of amount of", "UnitTypeId(unit_type) return self._bot_object._game_data.unit_types[unit_type] @property_immutable_cache def _type_data(self) -> \"UnitTypeData\": \"\"\" Provides", "the last point is the same as the starting point,", "worker or MULE is carrying (gold-)minerals. \"\"\" return not IS_CARRYING_MINERALS.isdisjoint(self.buffs)", "self._proto.cargo_space_max - self._proto.cargo_space_taken @property def assigned_harvesters(self) -> int: \"\"\" Returns", "DeprecationWarning, stacklevel=2) return self.is_idle @property def is_idle(self) -> bool: \"\"\"", "@property def is_selected(self) -> bool: \"\"\" Checks if the unit", "@property def noqueue(self) -> bool: \"\"\" Checks if the unit", "@property def unit_alias(self) -> Optional[UnitTypeId]: \"\"\" Building type equality, e.g.", "def is_facing_unit(self, other_unit: Unit, angle_error: float = 1e-3) -> bool:", "@property def detect_range(self) -> Union[int, float]: \"\"\" Returns the detection", "unit_alias(self) -> Optional[UnitTypeId]: \"\"\" Building type equality, e.g. FlyingOrbitalCommand is", "Returns a tuple of form '(bonus damage, armor type)' if", ".ids.buff_id import BuffId from .ids.upgrade_id import UpgradeId from .ids.unit_typeid import", "= {abilities} return self.orders[0].ability.id in abilities @property_immutable_cache def is_moving(self) ->", "shields. \"\"\" return self._proto.health @property def health_max(self) -> Union[int, float]:", "the unit. This is a value of 1 or 2", "abilities on the ground, like queen creep tumor, ravager bile,", "for own units. \"\"\" if not self.orders: return False if", "0 for non-protoss units. 
\"\"\" return self._proto.shield_max @property def shield_percentage(self)", "bool: \"\"\" Checks if this unit has any units loaded.", "\"\"\" return self._proto.health_max @property def health_percentage(self) -> Union[int, float]: \"\"\"", "return self(AbilityId.HARVEST_GATHER, target=target, queue=queue) def return_resource(self, target: Unit = None,", "stacklevel=2) return self.is_idle @property def is_idle(self) -> bool: \"\"\" Checks", "def passengers(self) -> Set[Unit]: \"\"\" Returns the units inside a", "returns the position where a terran building has to land", "< 0: self.actions.append(unit.move(closest_allied_unit_because_cant_attack)) else: self.actions.append(unit.move(retreatPosition)) \"\"\" if self.can_attack: return self._proto.weapon_cooldown", "self.type_id == UNIT_BATTLECRUISER: return True if self._weapons: return any(weapon.type in", "against ground units. Does not include upgrades. \"\"\" if self.type_id", "\"\"\" Checks if the unit is controlled by the bot.", "count for unit. 3 for gas buildings, 2*n for n", "vespene geyser to deliver resources to townhall. Only works for", ".cache import property_immutable_cache, property_mutable_cache from .constants import ( transforming, IS_STRUCTURE,", "currently selected. \"\"\" return self._proto.is_selected @property def is_on_screen(self) -> bool:", "4 if self.type_id == UNIT_BATTLECRUISER: return 6 if self.can_attack_ground: weapon", "\"\"\" return Race(self._type_data._proto.race) @property def tag(self) -> int: \"\"\" Returns", "the unit has the 'light' attribute. \"\"\" return IS_LIGHT in", "if the unit is cloaked. 
\"\"\" return self._proto.cloak in IS_CLOAKED", "-> bool: \"\"\" Returns True if the unit is your", "return self._proto.attack_upgrade_level @property def armor_upgrade_level(self) -> int: \"\"\" Returns the", "-> UnitCommand: \"\"\" Orders a unit to gather minerals or", "if not self.orders: return False if isinstance(abilities, AbilityId): abilities =", "IS_STRUCTURE, IS_LIGHT, IS_ARMORED, IS_BIOLOGICAL, IS_MECHANICAL, IS_MASSIVE, IS_PSIONIC, UNIT_BATTLECRUISER, UNIT_ORACLE, TARGET_GROUND,", "fortress etc.) have a little lower range than this formula", "\"\"\" return IS_ARMORED in self._type_data.attributes @property def is_biological(self) -> bool:", "0 return self._proto.energy / self._proto.energy_max @property def is_snapshot(self) -> bool:", "TARGET_GROUND for weapon in self._weapons) return False @property_immutable_cache def ground_dps(self)", "( self._bot_object._distance_squared_unit_to_unit(self, target) <= (self.radius + target.radius + unit_attack_range +", "1e-3) -> bool: \"\"\" Function not completed yet :param other_unit:", "not completed yet :param other_unit: :param angle_error: \"\"\" pass @property", "will not move until it gets new orders. :param queue:", "hold_position(self, queue: bool = False) -> UnitCommand: \"\"\" Orders a", "CAN_BE_ATTACKED @property_immutable_cache def buffs(self) -> Set: \"\"\" Returns the set", "int): return self.orders[0].target else: return Point2.from_proto(self.orders[0].target) return None @property def", "\"\"\" Checks if the unit is an SCV that is", "\"\"\" return self._type_data._proto.movement_speed @property def is_mineral_field(self) -> bool: \"\"\" Checks", "other_unit: Unit, angle_error: float = 1e-3) -> bool: \"\"\" Function", "\"\"\" Half of unit size. 
See https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void) \"\"\" return self._proto.radius", "self.buffs def train(self, unit: UnitTypeId, queue: bool = False) ->", "unit to patrol between position it has when the command", "repair_target: :param queue: \"\"\" return self(AbilityId.EFFECT_REPAIR, target=repair_target, queue=queue) def __hash__(self):", "self._proto.cargo_space_max @property def cargo_left(self) -> Union[float, int]: \"\"\" Returns how", "= {} def __repr__(self) -> str: \"\"\" Returns string of", "def orders(self) -> List[UnitOrder]: \"\"\" Returns the a list of", "field. \"\"\" return self._type_data.has_minerals @property def is_vespene_geyser(self) -> bool: \"\"\"", "\"\"\" Returns the percentage of shield points the unit has.", "value of 1 or 2 in a two player game.", "Photoncannons also need to be powered. \"\"\" return self.is_ready and", "@classmethod def from_proto(cls, proto, bot_object: BotAI): return cls( bot_object._game_data.abilities[proto.ability_id], (proto.target_world_space_pos", "bot. Enemy buildings that have been scouted and are in", "Does not include upgrades or buffs. \"\"\" return self._type_data._proto.movement_speed @property", "try: return self.tag == other.tag except: return False def __call__(self,", "like queen creep tumor, ravager bile, HT storm if ability_target_type", "def tech_alias(self) -> Optional[List[UnitTypeId]]: \"\"\" Building tech equality, e.g. OrbitalCommand", "upgrades \"\"\" return self._type_data._proto.armor @property def sight_range(self) -> Union[int, float]:", "is_collecting(self) -> bool: \"\"\" Checks if a unit is gathering", "float]: \"\"\" Returns the time until the unit can fire", "-> Union[int, float]: \"\"\" Half of unit size. 
See https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void)", "or Point2 (if it is a Position) from the first", ".ids.upgrade_id import UpgradeId from .ids.unit_typeid import UnitTypeId from .position import", "remaining in a mineral field. \"\"\" return self._proto.mineral_contents @property def", "ENEMIES @property_mutable_cache def orders(self) -> List[UnitOrder]: \"\"\" Returns the a", ".constants import ( transforming, IS_STRUCTURE, IS_LIGHT, IS_ARMORED, IS_BIOLOGICAL, IS_MECHANICAL, IS_MASSIVE,", "if the unit can attack at all. \"\"\" # TODO", "\"\"\" Returns a tuple of form '(bonus damage, armor type)'", "\"\"\" return not IS_CARRYING_RESOURCES.isdisjoint(self.buffs) @property def detect_range(self) -> Union[int, float]:", "units. \"\"\" return self.is_using_ability(IS_CONSTRUCTING_SCV) @property_immutable_cache def is_transforming(self) -> bool: \"\"\"", "Returns the set of current buffs the unit has. \"\"\"", "from .position import Point2, Point3 from .unit_command import UnitCommand warnings.simplefilter(\"once\")", "return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (self.radius + target.radius + unit_attack_range", "cargo space is currently left in the unit. \"\"\" return", "is hostile. \"\"\" return self._proto.alliance == IS_ENEMY @property def owner_id(self)", "- self._proto.ideal_harvesters @property_immutable_cache def weapon_cooldown(self) -> Union[int, float]: \"\"\" Returns", "addon (techlab or reactor), returns the position where a terran", "unit is facing as a float in range [0,2π). 0", "int if it has too few mining.\"\"\" return self._proto.assigned_harvesters -", "a unit to stop, but can start to move on", "to detect and Photoncannons also need to be powered. 
\"\"\"", "self._bot_object._distance_pos_to_pos(self.position_tuple, target) <= cast_range + self.radius + bonus_distance ) return", "\"\"\" return self(AbilityId.HARVEST_GATHER, target=target, queue=queue) def return_resource(self, target: Unit =", "unit has. Does not include shields. \"\"\" if self._proto.health_max ==", "distance between self and p. To calculate the 3d distance,", "facing another unit def is_facing_unit(self, other_unit: Unit, angle_error: float =", "Returns the detection distance of the unit. \"\"\" return self._proto.detect_range", "return {unit.tag for unit in self._proto.passengers} @property def cargo_used(self) ->", "the unit is flying. \"\"\" return self._proto.is_flying or self.has_buff(BuffId.GRAVITONBEAM) @property", "-> Point2: \"\"\" Returns the 2d position of the unit.", "self._proto.energy @property def energy_max(self) -> Union[int, float]: \"\"\" Returns the", "for the bot. Enemy buildings that have been scouted and", "or target.type_id == UNIT_COLOSSUS): unit_attack_range = self.air_range else: return False", "\"\"\" Returns the 2d position of the unit. \"\"\" return", "units. \"\"\" if self._proto.shield_max == 0: return 0 return self._proto.shield", "return self._proto.health @property def health_max(self) -> Union[int, float]: \"\"\" Returns", "True if self._weapons: return any(weapon.type in TARGET_AIR for weapon in", "Returns True if the unit is your own hallucination or", "Only works for own units. \"\"\" return self.is_using_ability(IS_GATHERING) @property_immutable_cache def", "energy the unit has. Returns 0 for units without energy.", "if the target is in range. Includes the target's radius", "position. Can be queued up to seven patrol points. If", "Unit(name='SCV', tag=4396941328). \"\"\" return f\"Unit(name={self.name !r}, tag={self.tag})\" @property_immutable_cache def type_id(self)", "tech equality, e.g. OrbitalCommand is the same as CommandCenter For", "if a unit is patrolling. 
Only works for own units.", "can be a Unit (to follow that unit) or Point2.", "bool = False) -> UnitCommand: \"\"\" Orders unit to attack.", "unit can fire again, returns -1 for units that can't", "return self(AbilityId.EFFECT_REPAIR, target=repair_target, queue=queue) def __hash__(self): return self.tag def __eq__(self,", "Returns how much cargo space is currently left in the", "units shield. # NOTE: Returns 0 for units without a", "__future__ import annotations import warnings from typing import Any, Dict,", "in self._proto.passengers} @property_mutable_cache def passengers_tags(self) -> Set[int]: \"\"\" Returns the", "gathering or returning. Only works for own units. \"\"\" return", "noqueue(self) -> bool: \"\"\" Checks if the unit is idle.", "\"\"\" return self._proto.is_flying or self.has_buff(BuffId.GRAVITONBEAM) @property def is_burrowed(self) -> bool:", "bot. NOTE: This means the bot has vision of the", "weapon in self._weapons if weapon.type in TARGET_GROUND), None) if weapon:", "@property_mutable_cache def passengers_tags(self) -> Set[int]: \"\"\" Returns the tags of", "the shield points the unit has. Returns 0 for non-protoss", "units. \"\"\" return self._proto.shield_max @property def shield_percentage(self) -> Union[int, float]:", "\"\"\" return bool(self._proto.vespene_contents) @property def is_flying(self) -> bool: \"\"\" Checks", "False return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (self.radius + target.radius +", "self._type_data.attributes @property def is_massive(self) -> bool: \"\"\" Checks if the", "unit is on the screen. 
\"\"\" return self._proto.is_on_screen @property def", "self.can_attack_air: weapon = next((weapon for weapon in self._weapons if weapon.type", "TARGET_BOTH, IS_SNAPSHOT, IS_VISIBLE, IS_MINE, IS_ENEMY, IS_CLOAKED, IS_REVEALED, CAN_BE_ATTACKED, IS_CARRYING_MINERALS, IS_CARRYING_VESPENE,", "Point2, Point3 from .unit_command import UnitCommand warnings.simplefilter(\"once\") if TYPE_CHECKING: from", "-> bool: \"\"\" Checks if the unit has the 'psionic'", "-> Union[int, float]: \"\"\" Returns direction the unit is facing", "THIS COMMENT ARE NOT POPULATED FOR ENEMIES @property_mutable_cache def orders(self)", "and self.is_using_ability(transforming[self.type_id]) @property_immutable_cache def is_repairing(self) -> bool: \"\"\" Checks if", "\"\"\" Returns the maximum amount of frames of the visible", "will make the unit move there and attack everything on", "an SCV that is currently building. Only works for own", "extraction building. \"\"\" return self._type_data.has_vespene @property def health(self) -> Union[int,", "self.radius + bonus_distance ) return False @property def facing(self) ->", "empty geysers. \"\"\" return bool(self._proto.vespene_contents) @property def is_flying(self) -> bool:", "unit is a structure. \"\"\" return IS_STRUCTURE in self._type_data.attributes @property", "gas extraction building. :param target: :param queue: \"\"\" return self(AbilityId.HARVEST_GATHER,", "unit is completed. \"\"\" return self.build_progress == 1 @property def", "def add_on_tag(self) -> int: \"\"\" Returns the tag of the", "health_max(self) -> Union[int, float]: \"\"\" Returns the maximum health of", "target=position, queue=queue) def scan_move(self, *args, **kwargs) -> UnitCommand: \"\"\" Deprecated:", "way. 
:param target: :param queue: \"\"\" return self(AbilityId.ATTACK, target=target, queue=queue)", ":param bonus_distance: \"\"\" cast_range = self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range assert cast_range > 0,", "elif self.can_attack_air and (target.is_flying or target.type_id == UNIT_COLOSSUS): unit_attack_range =", "@property def is_psionic(self) -> bool: \"\"\" Checks if the unit", "\"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, queue=queue) def build(self, unit: UnitTypeId, position: Union[Point2,", "isinstance(p, Unit): return self._bot_object._distance_squared_unit_to_unit(self, p) ** 0.5 return self._bot_object.distance_math_hypot(self.position_tuple, p)", "of the unit.\"\"\" return self._proto.display_type == IS_VISIBLE @property def alliance(self)", "of the unit. \"\"\" return self._type_data.name @property def race(self) ->", "\"\"\" Orders a unit to stop, but can start to", "return self.build_progress == 1 @property def cloak(self) -> CloakState: \"\"\"", "self._type_data._proto.movement_speed @property def is_mineral_field(self) -> bool: \"\"\" Checks if the", "return self._proto.cloak in CAN_BE_ATTACKED @property_immutable_cache def buffs(self) -> Set: \"\"\"", "for non-protoss units. \"\"\" if self._proto.shield_max == 0: return 0", "TODO: Fix this because immovable units (sieged tank, planetary fortress", "False) -> UnitCommand: \"\"\" Orders the unit to move to", "bool(self._weapons) or self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE} @property_immutable_cache def can_attack_both(self) ->", "start to move on its own if it is attacked,", "return Point3.from_proto(self._proto.pos) def distance_to(self, p: Union[Unit, Point2, Point3]) -> Union[int,", "workers currently gathering resources at a geyser or mining base.\"\"\"", "p. To calculate the 3d distance, use unit.position3d.distance_to(p) :param p:", "the unit is an SCV that is currently building. Only", "unit. 
\"\"\" try: return self._type_data._proto.weapons except: return None @property_immutable_cache def", "weapon = next((weapon for weapon in self._weapons if weapon.type in", "reactor), returns the position where a terran building has to", "None @property_immutable_cache def can_attack(self) -> bool: \"\"\" Checks if the", "the unit. \"\"\" return Point3.from_proto(self._proto.pos) def distance_to(self, p: Union[Unit, Point2,", "return self._proto.shield_max @property def shield_percentage(self) -> Union[int, float]: \"\"\" Returns", "self._proto.is_powered @property def is_active(self) -> bool: \"\"\" Checks if the", "target.type_id == UNIT_COLOSSUS): unit_attack_range = self.air_range else: return False return", "AbilityData class UnitOrder: @classmethod def from_proto(cls, proto, bot_object: BotAI): return", "Race, TargetType, warpgate_abilities, TargetType, Target from .ids.ability_id import AbilityId from", "self.position.offset(Point2((-2.5, 0.5))) @property_mutable_cache def passengers(self) -> Set[Unit]: \"\"\" Returns the", "IS_REPAIRING, IS_DETECTOR, UNIT_PHOTONCANNON, UNIT_COLOSSUS, ) from .data import Alliance, Attribute,", "\"\"\" Checks if the unit is revealed or not cloaked", "How much cargo space is available at maximum. \"\"\" return", "starting point, the unit will patrol in a circle. :param", "amount of energy the unit has. Returns 0 for units", "not IS_CARRYING_VESPENE.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_resource(self) -> bool: \"\"\" Checks if", "have been scouted and are in the fog of war", "stop, but can start to move on its own if", "Checks if the unit is powered by a pylon or", "range or other friendly units need the space. :param queue:", "(weapon.damage * weapon.attacks) / weapon.speed return 0 @property_immutable_cache def ground_range(self)", "0) -> bool: \"\"\" Checks if the target is in", "the visible timer bar. # NOTE: Returns 0 for units", "stop moving. 
It will not move until it gets new", "bool: \"\"\" Checks if the unit is only available as", "For Hive, this returns [UnitTypeId.Hatchery, UnitTypeId.Lair] For SCV, this returns", "as the starting point, the unit will patrol in a", "# Used by property_immutable_cache self.cache = {} def __repr__(self) ->", "weapon.speed return 0 @property_immutable_cache def ground_range(self) -> Union[int, float]: \"\"\"", "= False) -> UnitCommand: \"\"\" Orders unit to build another", "units. Does not include upgrades. \"\"\" if self.can_attack_air: weapon =", "bool: \"\"\" Checks if the unit is controlled by the", "self._type_data.tech_alias @property def unit_alias(self) -> Optional[UnitTypeId]: \"\"\" Building type equality,", "https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void) \"\"\" return self._proto.radius @property def build_progress(self) -> Union[int, float]:", "own if it is attacked, enemy unit is in range", "return self._proto.armor_upgrade_level @property def shield_upgrade_level(self) -> int: \"\"\" Returns the", "abilities. Only works for own units. \"\"\" if not self.orders:", "bool: \"\"\" Checks if the unit is burrowed. \"\"\" return", "the amount of gas remaining in a geyser. \"\"\" return", "how much cargo space is currently left in the unit.", "IS_CARRYING_RESOURCES, IS_ATTACKING, IS_PATROLLING, IS_GATHERING, IS_RETURNING, IS_COLLECTING, IS_CONSTRUCTING_SCV, IS_REPAIRING, IS_DETECTOR, UNIT_PHOTONCANNON,", "float in range [0,2π). 0 is in direction of x", "# NOTE: Returns 0 for units without a timer bar.", "Note that some units take up more than one space.", "-> bool: \"\"\" Checks if the unit can air attack", "not include shields. \"\"\" return self._proto.health @property def health_max(self) ->", "the unit. Does not include shields. \"\"\" return self._proto.health_max @property", "1 or 2 in a two player game. \"\"\" return", "Does not include upgrades. 
\"\"\" if self.type_id == UNIT_BATTLECRUISER: return", "def scan_move(self, *args, **kwargs) -> UnitCommand: \"\"\" Deprecated: This ability", "has. Returns 0 for units without energy. \"\"\" if self._proto.energy_max", "attacking. Only works for own units. \"\"\" return self.is_using_ability(IS_ATTACKING) @property_immutable_cache", "def ground_dps(self) -> Union[int, float]: \"\"\" Returns the dps against", "@property def cloak(self) -> CloakState: \"\"\" Returns cloak state. See", "@property def is_mineral_field(self) -> bool: \"\"\" Checks if the unit", "Orders a unit to gather minerals or gas. 'Target' must", "@property def armor_upgrade_level(self) -> int: \"\"\" Returns the upgrade level", "return IS_BIOLOGICAL in self._type_data.attributes @property def is_mechanical(self) -> bool: \"\"\"", "\"\"\" return self._type_data.tech_alias @property def unit_alias(self) -> Optional[UnitTypeId]: \"\"\" Building", "e.g. OrbitalCommand is the same as CommandCenter For Hive, this", "queue: \"\"\" return self(AbilityId.PATROL, target=position, queue=queue) def repair(self, repair_target: Unit,", "position where a terran building has to land to connect", "armor(self) -> Union[int, float]: \"\"\" Returns the armor of the", "For flying OrbitalCommand, this returns UnitTypeId.OrbitalCommand For SCV, this returns", "between self and p. To calculate the 3d distance, use", "the first order, returns None if the unit is idle", "\"\"\" Check if the unit is using one of the", "For casting abilities that target other units, like transfuse, feedback,", "Nydus, Overlord or WarpPrism. 
\"\"\" return {Unit(unit, self._bot_object) for unit", "IS_CARRYING_VESPENE.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_resource(self) -> bool: \"\"\" Checks if a", "self._proto.weapon_cooldown return -1 @property def engaged_target_tag(self) -> int: # TODO", "**kwargs) def hold_position(self, queue: bool = False) -> UnitCommand: \"\"\"", "@property def shield_percentage(self) -> Union[int, float]: \"\"\" Returns the percentage", "unit. \"\"\" return self._proto.detect_range @property_immutable_cache def is_detector(self) -> bool: \"\"\"", "IS_ENEMY, IS_CLOAKED, IS_REVEALED, CAN_BE_ATTACKED, IS_CARRYING_MINERALS, IS_CARRYING_VESPENE, IS_CARRYING_RESOURCES, IS_ATTACKING, IS_PATROLLING, IS_GATHERING,", "def is_cloaked(self) -> bool: \"\"\" Checks if the unit is", "3d distance, use unit.position3d.distance_to(p) :param p: \"\"\" if isinstance(p, Unit):", "return 0 return self._proto.health / self._proto.health_max @property def shield(self) ->", "attack everything on its way. :param target: :param queue: \"\"\"", "target: Union[Unit, Point2, Point3], queue: bool = False) -> UnitCommand:", "weapon.type in TARGET_GROUND), None) if weapon: return (weapon.damage * weapon.attacks)", "Order an SCV or MULE to repair. :param repair_target: :param", "the percentage of shield points the unit has. Returns 0", "-> Point3: \"\"\" Returns the 3d position of the unit.", "+ bonus_distance) ** 2 ) # For casting abilities on", "between position it has when the command starts and the", "tech_alias(self) -> Optional[List[UnitTypeId]]: \"\"\" Building tech equality, e.g. OrbitalCommand is", "self._bot_object._game_data.abilities[ability_id.value]._proto.target # For casting abilities that target other units, like", "Union[Unit, Point2, Point3], queue: bool = False) -> UnitCommand: \"\"\"", "has. Returns 0 for units without energy. \"\"\" return self._proto.energy", "storm). 
:param ability_id: :param target: :param bonus_distance: \"\"\" cast_range =", "-> int: \"\"\" Returns the owner of the unit. This", "ability_target_type in {Target.Point.value, Target.PointOrUnit.value} and isinstance( target, (Point2, tuple) ):", "def is_carrying_resource(self) -> bool: \"\"\" Checks if a worker is", "is_selected(self) -> bool: \"\"\" Checks if the unit is currently", "\"\"\" Checks if the unit is on the screen. \"\"\"", "ability_id: AbilityId, target: Union[Unit, Point2], bonus_distance: float = 0 )", "for units that can't attack. Usage: if unit.weapon_cooldown == 0:", "def is_mechanical(self) -> bool: \"\"\" Checks if the unit has", "def health_max(self) -> Union[int, float]: \"\"\" Returns the maximum health", "has. Returns 0 for non-protoss units. \"\"\" return self._proto.shield @property", "return self._proto.alliance == IS_MINE @property def is_enemy(self) -> bool: \"\"\"", "Returns the units inside a Bunker, CommandCenter, PlanetaryFortress, Medivac, Nydus,", "Unit: def __init__(self, proto_data, bot_object: BotAI): \"\"\" :param proto_data: :param", "the unit can have. Returns 0 for non-protoss units. \"\"\"", "buffs the unit has. \"\"\" return {BuffId(buff_id) for buff_id in", "space. \"\"\" return self._proto.cargo_space_taken @property def has_cargo(self) -> bool: \"\"\"", "type_ids of the same unit type. \"\"\" unit_type = self._proto.unit_type", "def gather(self, target: Unit, queue: bool = False) -> UnitCommand:", "queue=queue) def repair(self, repair_target: Unit, queue: bool = False) ->", "a weapon. \"\"\" return self._proto.attack_upgrade_level @property def armor_upgrade_level(self) -> int:", "self.cache = {} def __repr__(self) -> str: \"\"\" Returns string", "\"UnitTypeData\": \"\"\" Provides the unit type data. \"\"\" return self._bot_object._game_data.units[self._proto.unit_type]", "points. If the last point is the same as the", "works for own units. 
\"\"\" return self.is_using_ability(IS_REPAIRING) @property def add_on_tag(self)", "def is_mineral_field(self) -> bool: \"\"\" Checks if the unit is", "bool: \"\"\" Checks if a unit is on its way", "unit to gather minerals or gas. 'Target' must be a", "self.type_id == UNIT_ORACLE: return 4 if self.type_id == UNIT_BATTLECRUISER: return", "0 for units without energy. \"\"\" return self._proto.energy_max @property def", "Union[int, float]: \"\"\" Returns completion in range [0,1].\"\"\" return self._proto.build_progress", "False) -> UnitCommand: \"\"\" Orders a unit to stop moving.", "is a Unit) or Point2 (if it is a Position)", "int: \"\"\" Returns a positive int if unit has too", "def buff_duration_remain(self) -> int: \"\"\" Returns the amount of remaining", "at a geyser or mining base.\"\"\" return self._proto.assigned_harvesters @property def", "\"\"\" Orders unit to build another 'unit' at 'position'. Usage:", "@property_immutable_cache def is_attacking(self) -> bool: \"\"\" Checks if the unit", "attribute. \"\"\" return IS_MECHANICAL in self._type_data.attributes @property def is_massive(self) ->", "the 2d distance between self and p. To calculate the", "is gathering or returning. Only works for own units. \"\"\"", "self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id, queue=queue) def warp_in(self, unit: UnitTypeId, position: Union[Point2, Point3]) ->", "unit belongs to. \"\"\" return self._proto.alliance @property def is_mine(self) ->", "to land to connect to addon \"\"\" return self.position.offset(Point2((-2.5, 0.5)))", "int: \"\"\" Returns the number of workers currently gathering resources", "if the unit transforming. Only works for own units. \"\"\"", "HT storm if ability_target_type in {Target.Point.value, Target.PointOrUnit.value} and isinstance( target,", "attack ground units. 
\"\"\" if self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE}: return", "if ability_target_type in {Target.Point.value, Target.PointOrUnit.value} and isinstance( target, (Point2, tuple)", "in TARGET_AIR), None) if weapon: return (weapon.damage * weapon.attacks) /", "the 'mechanical' attribute. \"\"\" return IS_MECHANICAL in self._type_data.attributes @property def", "is in range or other friendly units need the space.", "the detection distance of the unit. \"\"\" return self._proto.detect_range @property_immutable_cache", "[0,2π). 0 is in direction of x axis.\"\"\" return self._proto.facing", "return self._proto.is_active # PROPERTIES BELOW THIS COMMENT ARE NOT POPULATED", "Checks if the unit is controlled by the bot. \"\"\"", "def owner_id(self) -> int: \"\"\" Returns the owner of the", "def target_in_range(self, target: Unit, bonus_distance: Union[int, float] = 0) ->", "Orders unit to train another 'unit'. Usage: self.actions.append(COMMANDCENTER.train(SCV)) :param unit:", "unit: UnitTypeId, queue: bool = False) -> UnitCommand: \"\"\" Orders", "# TODO What does this do? return self._proto.engaged_target_tag # Unit", "\"\"\" Order an SCV or MULE to repair. :param repair_target:", "\"\"\" :param ability: :param target: :param progress: \"\"\" self.ability =", "-> int: # TODO What does this do? 
return self._proto.engaged_target_tag", "in direction of x axis.\"\"\" return self._proto.facing # TODO: a", "upgrade: :param queue: \"\"\" return self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id, queue=queue) def warp_in(self, unit:", "in self._weapons if weapon.type in TARGET_AIR), None) if weapon: return", "\"\"\" Returns the amount of remaining frames of the visible", "return self._type_data.tech_alias @property def unit_alias(self) -> Optional[UnitTypeId]: \"\"\" Building type", "def is_vespene_geyser(self) -> bool: \"\"\" Checks if the unit is", "Set, Tuple, Union, TYPE_CHECKING from .cache import property_immutable_cache, property_mutable_cache from", "upgrade: UpgradeId, queue: bool = False) -> UnitCommand: \"\"\" Orders", "Point2], bonus_distance: float = 0 ) -> bool: \"\"\" Test", "of the same unit type. \"\"\" unit_type = self._proto.unit_type if", "Checks if the unit is revealed or not cloaked and", "that has no cast range\" ability_target_type = self._bot_object._game_data.abilities[ability_id.value]._proto.target # For", "self.ground_range elif self.can_attack_air and (target.is_flying or target.type_id == UNIT_COLOSSUS): unit_attack_range", "def is_active(self) -> bool: \"\"\" Checks if the unit is", "completed. \"\"\" return self.build_progress == 1 @property def cloak(self) ->", "\"\"\" if self._proto.health_max == 0: return 0 return self._proto.health /", "\"\"\" return self.is_using_ability(IS_CONSTRUCTING_SCV) @property_immutable_cache def is_transforming(self) -> bool: \"\"\" Checks", "= proto_data self._bot_object = bot_object # Used by property_immutable_cache self.cache", "Does not include upgrades. \"\"\" if self.can_attack_ground: weapon = next((weapon", "def can_attack_air(self) -> bool: \"\"\" Checks if the unit can", "-> bool: \"\"\" Checks if the unit is a structure.", "to seven patrol points. 
If the last point is the", "target.is_flying: unit_attack_range = self.ground_range elif self.can_attack_air and (target.is_flying or target.type_id", "'bonus damage' against 'armor type'. Possible armor typs are: 'Light',", "the unit move there and attack everything on its way.", "Attribute, CloakState, DisplayType, Race, TargetType, warpgate_abilities, TargetType, Target from .ids.ability_id", "Union[float, int]: \"\"\" How much cargo space is available at", "Caches all type_ids of the same unit type. \"\"\" unit_type", "SCV, this returns None \"\"\" return self._type_data.unit_alias @property_immutable_cache def _weapons(self):", "@property def shield_max(self) -> Union[int, float]: \"\"\" Returns the maximum", "the unit is an SCV or MULE that is currently", "\"\"\" Returns the amount of cargo space the unit needs.", "own units. \"\"\" return self.is_using_ability(IS_GATHERING) @property_immutable_cache def is_returning(self) -> bool:", "None) if weapon: return weapon.range return 0 @property_immutable_cache def bonus_damage(self):", "weapon: return weapon.range return 0 @property_immutable_cache def bonus_damage(self): \"\"\" Returns", "the maximum shield points the unit can have. Returns 0", "self(AbilityId.HARVEST_RETURN, target=target, queue=queue) def move(self, position: Union[Point2, Point3], queue: bool", "include upgrades. \"\"\" if self.type_id == UNIT_BATTLECRUISER: return 6 if", "Returns the a list of the current orders. \"\"\" return", "return self.orders[0].ability.id in abilities @property_immutable_cache def is_moving(self) -> bool: \"\"\"", "some units take up more than one space. \"\"\" return", "-> Point2: \"\"\" If unit is addon (techlab or reactor),", "self.is_using_ability(IS_CONSTRUCTING_SCV) @property_immutable_cache def is_transforming(self) -> bool: \"\"\" Checks if the", "one of the given abilities. Only works for own units.", "not include shields. 
\"\"\" return self._proto.health_max @property def health_percentage(self) ->", "IS_PSIONIC in self._type_data.attributes @property def tech_alias(self) -> Optional[List[UnitTypeId]]: \"\"\" Building", "Does not include shields. \"\"\" return self._proto.health @property def health_max(self)", "ability attacks (Oracle, Baneling) or multiple attacks (Thor). if self._weapons:", "* weapon.attacks) / weapon.speed return 0 @property_immutable_cache def ground_range(self) ->", "geyser to deliver resources to townhall. Only works for own", "def sight_range(self) -> Union[int, float]: \"\"\" Returns the sight range", "\"\"\" Checks if the unit is attacking. Only works for", "Point2: \"\"\" If unit is addon (techlab or reactor), returns", "works for own units. \"\"\" if not self.orders: return False", "@property_immutable_cache def _type_data(self) -> \"UnitTypeData\": \"\"\" Provides the unit type", "own units. \"\"\" return self.is_using_ability(IS_CONSTRUCTING_SCV) @property_immutable_cache def is_transforming(self) -> bool:", "Optional[Union[int, Point2]]: \"\"\" Returns the target tag (if it is", "\"\"\" return self._proto.pos.x, self._proto.pos.y @property_immutable_cache def position(self) -> Point2: \"\"\"", "is_light(self) -> bool: \"\"\" Checks if the unit has the", "in range or other friendly units need the space. :param", "or self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE} @property_immutable_cache def can_attack_both(self) -> bool:", "self._weapons: return any(weapon.type in TARGET_GROUND for weapon in self._weapons) return", "\"\"\" return self._type_data._proto.sight_range @property def movement_speed(self) -> Union[int, float]: \"\"\"", "scouted and are in the fog of war or attacking", "patrol in a circle. :param position: :param queue: \"\"\" return", "bool: \"\"\" Checks if the unit is currently training or", ":param proto_data: :param bot_object: \"\"\" self._proto = proto_data self._bot_object =", "is idle. 
\"\"\" warnings.warn(\"noqueue will be removed soon, please use", "target: :param progress: \"\"\" self.ability = ability self.target = target", "sight range of the unit. \"\"\" return self._type_data._proto.sight_range @property def", "without a shield. \"\"\" return self._proto.shield_upgrade_level @property def buff_duration_remain(self) ->", "name of the unit. \"\"\" return self._type_data.name @property def race(self)", "(self.radius + target.radius + unit_attack_range + bonus_distance) ** 2 )", "\"\"\" return self._proto.owner @property def position_tuple(self) -> Tuple[float, float]: \"\"\"", "= None, queue: bool = False) -> UnitCommand: \"\"\" Orders", "0 is in direction of x axis.\"\"\" return self._proto.facing #", "and therefore can be attacked. \"\"\" return self._proto.cloak in CAN_BE_ATTACKED", "target=target, queue=queue) def gather(self, target: Unit, queue: bool = False)", "space. :param queue: \"\"\" return self(AbilityId.STOP, queue=queue) def patrol(self, position:", "burrowed. \"\"\" return self._proto.is_burrowed @property def is_hallucination(self) -> bool: \"\"\"", "\"\"\" return {BuffId(buff_id) for buff_id in self._proto.buff_ids} @property_immutable_cache def is_carrying_minerals(self)", "to be completed in order to detect and Photoncannons also", "other): try: return self.tag == other.tag except: return False def", "\"\"\" Checks if the unit has the 'armored' attribute. \"\"\"", "is_moving(self) -> bool: \"\"\" Checks if the unit is moving.", "unit has the 'psionic' attribute. \"\"\" return IS_PSIONIC in self._type_data.attributes", "self._type_data.name @property def race(self) -> Race: \"\"\" Returns the race", "Attribute(b.attribute).name) else: return None @property def armor(self) -> Union[int, float]:", "is a Position) from the first order, returns None if", "\"\"\" Returns the range against air units. 
Does not include", "UNIT_ORACLE} @property_immutable_cache def can_attack_both(self) -> bool: \"\"\" Checks if the", "target: :param bonus_distance: \"\"\" cast_range = self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range assert cast_range >", "only available as a snapshot for the bot. Enemy buildings", "== IS_MINE @property def is_enemy(self) -> bool: \"\"\" Checks if", "without energy. \"\"\" return self._proto.energy @property def energy_max(self) -> Union[int,", "units. Does not include upgrades. \"\"\" if self.type_id == UNIT_BATTLECRUISER:", "in self._weapons: if weapon.damage_bonus: b = weapon.damage_bonus[0] return (b.bonus, Attribute(b.attribute).name)", "target.radius + unit_attack_range + bonus_distance) ** 2 ) def in_ability_cast_range(", "and isinstance( target, (Point2, tuple) ): return ( self._bot_object._distance_pos_to_pos(self.position_tuple, target)", "\"\"\" return not IS_CARRYING_VESPENE.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_resource(self) -> bool: \"\"\"", "float]: \"\"\" Returns the percentage of health the unit has.", "p) def target_in_range(self, target: Unit, bonus_distance: Union[int, float] = 0)", "is_active(self) -> bool: \"\"\" Checks if the unit is currently", "of unit. \"\"\" return self._proto.add_on_tag @property def has_add_on(self) -> bool:", "units without energy. \"\"\" return self._proto.energy_max @property def energy_percentage(self) ->", "\"\"\" unit_type = self._proto.unit_type if unit_type not in self._bot_object._game_data.unit_types: self._bot_object._game_data.unit_types[unit_type]", "formula if self.can_attack_ground and not target.is_flying: unit_attack_range = self.ground_range elif", "of the addon of unit. \"\"\" return self._proto.add_on_tag @property def", "(gold-)minerals. \"\"\" return not IS_CARRYING_MINERALS.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_vespene(self) -> bool:", "the health of the unit. Does not include shields. 
\"\"\"", "is addon (techlab or reactor), returns the position where a", "UnitOrder: @classmethod def from_proto(cls, proto, bot_object: BotAI): return cls( bot_object._game_data.abilities[proto.ability_id],", "is_repairing(self) -> bool: \"\"\" Checks if the unit is an", "IS_PSIONIC, UNIT_BATTLECRUISER, UNIT_ORACLE, TARGET_GROUND, TARGET_AIR, TARGET_BOTH, IS_SNAPSHOT, IS_VISIBLE, IS_MINE, IS_ENEMY,", "a pylon or warppism. \"\"\" return self._proto.is_powered @property def is_active(self)", "a Unit) or Point2 (if it is a Position) from", "\"\"\" return IS_BIOLOGICAL in self._type_data.attributes @property def is_mechanical(self) -> bool:", "bool = False) -> UnitCommand: \"\"\" Orders unit to train", "not need a 'target'. :param target: :param queue: \"\"\" return", "Union[int, float]: \"\"\" Returns the detection distance of the unit.", "Does not include upgrades \"\"\" return self._type_data._proto.armor @property def sight_range(self)", "Does not include upgrades. \"\"\" if self.can_attack_air: weapon = next((weapon", "-> Union[int, float]: \"\"\" Using the 2d distance between self", "return 0 return self._proto.energy / self._proto.energy_max @property def is_snapshot(self) ->", "sight_range(self) -> Union[int, float]: \"\"\" Returns the sight range of", "base.\"\"\" return self._proto.ideal_harvesters @property def surplus_harvesters(self) -> int: \"\"\" Returns", "give any information about the cloak status of the unit.\"\"\"", "land to connect to addon \"\"\" return self.position.offset(Point2((-2.5, 0.5))) @property_mutable_cache", "def is_massive(self) -> bool: \"\"\" Checks if the unit has", "shield. # NOTE: Returns 0 for units without a shield.", "controlled by the bot. \"\"\" return self._proto.alliance == IS_MINE @property", "the maximum health of the unit. 
Does not include shields.", "has too few mining.\"\"\" return self._proto.assigned_harvesters - self._proto.ideal_harvesters @property_immutable_cache def", "float]: \"\"\" Returns the amount of energy the unit has.", "Point2: \"\"\" Returns the 2d position of the unit. \"\"\"", "return cls( bot_object._game_data.abilities[proto.ability_id], (proto.target_world_space_pos if proto.HasField(\"target_world_space_pos\") else proto.target_unit_tag), proto.progress, )", ":param ability: :param target: :param progress: \"\"\" self.ability = ability", "Test if a unit is able to cast an ability", "be a Unit or Point2. Attacking a position will make", "tag of the unit. \"\"\" return self._proto.tag @property def is_structure(self)", "+ unit_attack_range + bonus_distance) ** 2 ) def in_ability_cast_range( self,", "Returns 0 for units without a weapon. \"\"\" return self._proto.attack_upgrade_level", "Union[int, float]: \"\"\" Returns the dps against air units. Does", "isinstance(self.orders[0].target, int): return self.orders[0].target else: return Point2.from_proto(self.orders[0].target) return None @property", "bool: \"\"\" Function not completed yet :param other_unit: :param angle_error:", "Orders unit to research 'upgrade'. Requires UpgradeId to be passed", "Returns the percentage of shield points the unit has. Returns", "if the unit is powered by a pylon or warppism.", "deliver resources to townhall. Only works for own units. \"\"\"", "the amount of remaining frames of the visible timer bar.", "return IS_STRUCTURE in self._type_data.attributes @property def is_light(self) -> bool: \"\"\"", "weapon in self._weapons: if weapon.damage_bonus: b = weapon.damage_bonus[0] return (b.bonus,", "or warppism. 
\"\"\" return self._proto.is_powered @property def is_active(self) -> bool:", "race(self) -> Race: \"\"\" Returns the race of the unit", "please use is_idle instead\", DeprecationWarning, stacklevel=2) return self.is_idle @property def", "TYPE_CHECKING from .cache import property_immutable_cache, property_mutable_cache from .constants import (", "tuple of form '(bonus damage, armor type)' if unit does", "CommandCenter, PlanetaryFortress, Medivac, Nydus, Overlord or WarpPrism. \"\"\" return {unit.tag", "target: Unit = None, queue: bool = False) -> UnitCommand:", "return 0 @property_immutable_cache def air_range(self) -> Union[int, float]: \"\"\" Returns", "int: \"\"\" Returns the upgrade level of the units armor.", "IS_SNAPSHOT @property def is_visible(self) -> bool: \"\"\" Checks if the", "-> Union[int, float]: \"\"\" Returns the armor of the unit.", "the unit is facing as a float in range [0,2π).", "can be attacked. \"\"\" return self._proto.cloak in CAN_BE_ATTACKED @property_immutable_cache def", "def has_vespene(self) -> bool: \"\"\" Checks if a geyser has", "in TARGET_GROUND), None) if weapon: return weapon.range return 0 @property_immutable_cache", "def is_burrowed(self) -> bool: \"\"\" Checks if the unit is", "queue=queue) def move(self, position: Union[Point2, Point3], queue: bool = False)", "a snapshot for the bot. Enemy buildings that have been", "Checks if the unit transforming. Only works for own units.", "Does not include shields. \"\"\" return self._proto.health_max @property def health_percentage(self)", "self._proto.buff_duration_remain @property def buff_duration_max(self) -> int: \"\"\" Returns the maximum", "a timer bar. \"\"\" return self._proto.buff_duration_remain @property def buff_duration_max(self) ->", "energy. 
\"\"\" return self._proto.energy @property def energy_max(self) -> Union[int, float]:", "queue=queue) def warp_in(self, unit: UnitTypeId, position: Union[Point2, Point3]) -> UnitCommand:", "unit: UnitTypeId, position: Union[Point2, Point3] = None, queue: bool =", "\"\"\" Test if a unit is able to cast an", "float]: \"\"\" Returns the dps against air units. Does not", "the armor of the unit. Does not include upgrades \"\"\"", "@property def attack_upgrade_level(self) -> int: \"\"\" Returns the upgrade level", "units armor. \"\"\" return self._proto.armor_upgrade_level @property def shield_upgrade_level(self) -> int:", "int: \"\"\" Returns the tag of the addon of unit.", "than one space. \"\"\" return self._proto.cargo_space_taken @property def has_cargo(self) ->", "import annotations import warnings from typing import Any, Dict, List,", "@property def radius(self) -> Union[int, float]: \"\"\" Half of unit", "@property def has_vespene(self) -> bool: \"\"\" Checks if a geyser", "amount of gas remaining in a geyser. \"\"\" return self._proto.vespene_contents", "unit_attack_range = self.air_range else: return False return ( self._bot_object._distance_squared_unit_to_unit(self, target)", "@property_immutable_cache def can_attack_both(self) -> bool: \"\"\" Checks if the unit", "unit has. Returns 0 for non-protoss units. \"\"\" return self._proto.shield", "def is_idle(self) -> bool: \"\"\" Checks if unit is idle.", "def is_revealed(self) -> bool: \"\"\" Checks if the unit is", "if self._proto.energy_max == 0: return 0 return self._proto.energy / self._proto.energy_max", "movement speed of the unit. Does not include upgrades or", "a mineral field or vespene geyser to mine. 
Only works", "\"\"\" return bool(self._proto.add_on_tag) @property_immutable_cache def add_on_land_position(self) -> Point2: \"\"\" If", "__call__(self, ability, target=None, queue: bool = False): return UnitCommand(ability, self,", "@property def tech_alias(self) -> Optional[List[UnitTypeId]]: \"\"\" Building tech equality, e.g.", "@property_immutable_cache def air_range(self) -> Union[int, float]: \"\"\" Returns the range", "instead of AbilityId. :param upgrade: :param queue: \"\"\" return self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id,", "can attack both ground and air units. \"\"\" if self.type_id", "Checks if unit is idle. \"\"\" return not self._proto.orders def", "*args, **kwargs) def hold_position(self, queue: bool = False) -> UnitCommand:", "return True if self._weapons: return any(weapon.type in TARGET_AIR for weapon", "\"\"\" # TODO: Fix this because immovable units (sieged tank,", "\"\"\" Returns the time until the unit can fire again,", "False) -> UnitCommand: \"\"\" Orders the unit to return resource.", "any gas remaining. You can't build extractors on empty geysers.", "-> bool: \"\"\" Checks if the unit is detected by", "it is a Position) from the first order, returns None", "unit transforming. Only works for own units. \"\"\" return self.type_id", "bonus_distance) ** 2 ) def in_ability_cast_range( self, ability_id: AbilityId, target:", "if it is attacked, enemy unit is in range or", "same as OrbitalCommand For flying OrbitalCommand, this returns UnitTypeId.OrbitalCommand For", "UNIT_BATTLECRUISER, UNIT_ORACLE, TARGET_GROUND, TARGET_AIR, TARGET_BOTH, IS_SNAPSHOT, IS_VISIBLE, IS_MINE, IS_ENEMY, IS_CLOAKED,", "if self._weapons: return any(weapon.type in TARGET_AIR for weapon in self._weapons)", "Returns 0 for units without a timer bar. \"\"\" return", "is attacked, enemy unit is in range or other friendly", "can attack ground units. 
\"\"\" if self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE}:", "return self._type_data.has_vespene @property def health(self) -> Union[int, float]: \"\"\" Returns", "Returns the range against air units. Does not include upgrades.", "( self._bot_object._distance_squared_unit_to_unit(self, target) <= (cast_range + self.radius + target.radius +", "a Unit or Point2. Attacking a position will make the", "@property_immutable_cache def is_carrying_vespene(self) -> bool: \"\"\" Checks if a worker", "/ self._proto.health_max @property def shield(self) -> Union[int, float]: \"\"\" Returns", "shield points the unit can have. Returns 0 for non-protoss", "resource. \"\"\" return not IS_CARRYING_RESOURCES.isdisjoint(self.buffs) @property def detect_range(self) -> Union[int,", "2d distance between self and p. To calculate the 3d", "the unit is using one of the given abilities. Only", "It does not give any information about the cloak status", "vespene gas. \"\"\" return not IS_CARRYING_VESPENE.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_resource(self) ->", "snipe, yamato if ability_target_type in {Target.Unit.value, Target.PointOrUnit.value} and isinstance(target, Unit):", "geyser or mining base.\"\"\" return self._proto.assigned_harvesters @property def ideal_harvesters(self) ->", "int: \"\"\" Returns the unique tag of the unit. \"\"\"", "+ target.radius + unit_attack_range + bonus_distance) ** 2 ) def", "Returns the target tag (if it is a Unit) or", "is no BuffId\" return buff in self.buffs def train(self, unit:", "when calculating distance to target. :param target: :param bonus_distance: \"\"\"", "position of the unit. 
\"\"\" return Point2.from_proto(self._proto.pos) @property_immutable_cache def position3d(self)", "bonus_distance: float = 0 ) -> bool: \"\"\" Test if", "def has_buff(self, buff: BuffId) -> bool: \"\"\" Checks if unit", "-> Tuple[float, float]: \"\"\" Returns the 2d position of the", ":param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, target=position, queue=queue) def research(self, upgrade:", "Union[Unit, Point2], bonus_distance: float = 0 ) -> bool: \"\"\"", "\"\"\" Returns the dps against air units. Does not include", "points the unit has. Returns 0 for non-protoss units. \"\"\"", "is an SCV or MULE that is currently repairing. Only", "float = 0 ) -> bool: \"\"\" Test if a", "can_attack_air(self) -> bool: \"\"\" Checks if the unit can air", "\"\"\" Checks if a unit is patrolling. Only works for", "has any gas remaining. You can't build extractors on empty", "a list of the current orders. \"\"\" return [UnitOrder.from_proto(order, self._bot_object)", "of the units attack. # NOTE: Returns 0 for units", "@property_immutable_cache def ground_range(self) -> Union[int, float]: \"\"\" Returns the range", "same unit type. \"\"\" unit_type = self._proto.unit_type if unit_type not", "self and p. To calculate the 3d distance, use unit.position3d.distance_to(p)", "-> Union[float, int]: \"\"\" How much cargo space is available", "Using the 2d distance between self and p. To calculate", "queue: \"\"\" return self(AbilityId.STOP, queue=queue) def patrol(self, position: Union[Point2, Point3],", "'light' attribute. \"\"\" return IS_LIGHT in self._type_data.attributes @property def is_armored(self)", "Union[Point2, Point3] = None, queue: bool = False) -> UnitCommand:", "\"\"\" Orders unit to attack. Target can be a Unit", "at maximum. \"\"\" return self._proto.cargo_space_max @property def cargo_left(self) -> Union[float,", "follow that unit) or Point2. 
:param position: :param queue: \"\"\"", "-> UnitCommand: \"\"\" Orders unit to research 'upgrade'. Requires UpgradeId", "of remaining frames of the visible timer bar. # NOTE:", "Set: \"\"\" Returns the set of current buffs the unit", "the target is in range. Includes the target's radius when", "UNIT_ORACLE}: return True if self._weapons: return any(weapon.type in TARGET_GROUND for", "is patrolling. Only works for own units. \"\"\" return self.is_using_ability(IS_PATROLLING)", "self.actions.append(unit.move(retreatPosition)) \"\"\" if self.can_attack: return self._proto.weapon_cooldown return -1 @property def", "): return ( self._bot_object._distance_pos_to_pos(self.position_tuple, target) <= cast_range + self.radius +", "self._proto.assigned_harvesters @property def ideal_harvesters(self) -> int: \"\"\" Returns the ideal", "if the unit is flying. \"\"\" return self._proto.is_flying or self.has_buff(BuffId.GRAVITONBEAM)", "weapon in self._weapons) return False @property_immutable_cache def ground_dps(self) -> Union[int,", "return IS_LIGHT in self._type_data.attributes @property def is_armored(self) -> bool: \"\"\"", "for own units. \"\"\" return self.is_using_ability(IS_GATHERING) @property_immutable_cache def is_returning(self) ->", "currently repairing. Only works for own units. \"\"\" return self.is_using_ability(IS_REPAIRING)", "\"\"\" Checks if the unit has the 'light' attribute. \"\"\"", "unit can have. Returns 0 for non-protoss units. \"\"\" return", "self._proto.energy_max @property def is_snapshot(self) -> bool: \"\"\" Checks if the", "unit has. Returns 0 for non-protoss units. \"\"\" if self._proto.shield_max", "Union[int, float]: \"\"\" Returns the sight range of the unit.", "the unit is detected by a sensor tower. \"\"\" return", "def air_range(self) -> Union[int, float]: \"\"\" Returns the range against", "\"\"\" if self.can_attack_ground: weapon = next((weapon for weapon in self._weapons", "the screen. 
\"\"\" return self._proto.is_on_screen @property def is_blip(self) -> bool:", "geyser to mine. Only works for own units. \"\"\" return", "this way. \"\"\" return self._proto.display_type == IS_SNAPSHOT @property def is_visible(self)", "in self.buffs def train(self, unit: UnitTypeId, queue: bool = False)", "CommandCenter For Hive, this returns [UnitTypeId.Hatchery, UnitTypeId.Lair] For SCV, this", "Dict, List, Optional, Set, Tuple, Union, TYPE_CHECKING from .cache import", "radius when calculating distance to target. :param target: :param bonus_distance:", "'psionic' attribute. \"\"\" return IS_PSIONIC in self._type_data.attributes @property def tech_alias(self)", "Union, TYPE_CHECKING from .cache import property_immutable_cache, property_mutable_cache from .constants import", "return self._proto.mineral_contents @property def vespene_contents(self) -> int: \"\"\" Returns the", "return self.is_using_ability(IS_GATHERING) @property_immutable_cache def is_returning(self) -> bool: \"\"\" Checks if", "def is_ready(self) -> bool: \"\"\" Checks if the unit is", "unit in self._proto.passengers} @property_mutable_cache def passengers_tags(self) -> Set[int]: \"\"\" Returns", "self.has_buff(BuffId.GRAVITONBEAM) @property def is_burrowed(self) -> bool: \"\"\" Checks if the", "cargo space the unit needs. \"\"\" return self._type_data.cargo_size @property def", "Orders the unit to move to 'position'. Target can be", "ground units. \"\"\" if self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE}: return True", "@property def armor(self) -> Union[int, float]: \"\"\" Returns the armor", "-> \"UnitTypeData\": \"\"\" Provides the unit type data. \"\"\" return", "frames of the visible timer bar. 
# NOTE: Returns 0", "IS_REVEALED @property def can_be_attacked(self) -> bool: \"\"\" Checks if the", "def shield(self) -> Union[int, float]: \"\"\" Returns the shield points", "+ target.radius + bonus_distance) ** 2 ) # For casting", "See https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95 \"\"\" return self._proto.cloak @property def is_cloaked(self) -> bool:", "\"\"\" Checks if the unit is hostile. \"\"\" return self._proto.alliance", "air units. \"\"\" if self.type_id == UNIT_BATTLECRUISER: return True if", "+ bonus_distance) ** 2 ) def in_ability_cast_range( self, ability_id: AbilityId,", ":param queue: \"\"\" return self(AbilityId.PATROL, target=position, queue=queue) def repair(self, repair_target:", "return True if self._weapons: return any(weapon.type in TARGET_BOTH for weapon", "been scouted and are in the fog of war or", "string of this form: Unit(name='SCV', tag=4396941328). \"\"\" return f\"Unit(name={self.name !r},", "target: :param bonus_distance: \"\"\" # TODO: Fix this because immovable", "@property_immutable_cache def type_id(self) -> UnitTypeId: \"\"\" UnitTypeId found in sc2/ids/unit_typeid.", "self.can_attack_ground: weapon = next((weapon for weapon in self._weapons if weapon.type", "of the unit as tuple without conversion to Point2. \"\"\"", "on its own if it is attacked, enemy unit is", "if self.can_attack_air: weapon = next((weapon for weapon in self._weapons if", "UNIT_BATTLECRUISER: return 6 if self.can_attack_air: weapon = next((weapon for weapon", "distance of the unit. \"\"\" return self._proto.detect_range @property_immutable_cache def is_detector(self)", "Half of unit size. See https://liquipedia.net/starcraft2/Unit_Statistics_(Legacy_of_the_Void) \"\"\" return self._proto.radius @property", "UnitTypeId, queue: bool = False) -> UnitCommand: \"\"\" Orders unit", "Only works for own units. 
\"\"\" return self.is_using_ability(IS_PATROLLING) @property_immutable_cache def", "self.is_using_ability(IS_PATROLLING) @property_immutable_cache def is_gathering(self) -> bool: \"\"\" Checks if a", "inside a Bunker, CommandCenter, PlanetaryFortress, Medivac, Nydus, Overlord or WarpPrism.", "time until the unit can fire again, returns -1 for", "owner_id(self) -> int: \"\"\" Returns the owner of the unit.", "for own units. \"\"\" return self.is_using_ability(IS_REPAIRING) @property def add_on_tag(self) ->", "Unit, bonus_distance: Union[int, float] = 0) -> bool: \"\"\" Checks", "@property def is_mine(self) -> bool: \"\"\" Checks if the unit", "unit. \"\"\" return self._type_data.name @property def race(self) -> Race: \"\"\"", "a sensor tower. \"\"\" return self._proto.is_blip @property def is_powered(self) ->", "the unit. \"\"\" return self._proto.cargo_space_max - self._proto.cargo_space_taken @property def assigned_harvesters(self)", "def order_target(self) -> Optional[Union[int, Point2]]: \"\"\" Returns the target tag", "Point3] = None, queue: bool = False) -> UnitCommand: \"\"\"", "in TARGET_AIR for weapon in self._weapons) return False @property_immutable_cache def", ":param queue: \"\"\" return self(AbilityId.STOP, queue=queue) def patrol(self, position: Union[Point2,", "self._proto.attack_upgrade_level @property def armor_upgrade_level(self) -> int: \"\"\" Returns the upgrade", "== 0: self.actions.append(unit.attack(target)) elif unit.weapon_cooldown < 0: self.actions.append(unit.move(closest_allied_unit_because_cant_attack)) else: self.actions.append(unit.move(retreatPosition))", "IS_PATROLLING, IS_GATHERING, IS_RETURNING, IS_COLLECTING, IS_CONSTRUCTING_SCV, IS_REPAIRING, IS_DETECTOR, UNIT_PHOTONCANNON, UNIT_COLOSSUS, )", "if a unit is able to cast an ability on", "Returns the tag of the addon of unit. 
\"\"\" return", "a positive int if unit has too many harvesters mining,", "unit is only available as a snapshot for the bot.", "air_dps(self) -> Union[int, float]: \"\"\" Returns the dps against air", "repair. :param repair_target: :param queue: \"\"\" return self(AbilityId.EFFECT_REPAIR, target=repair_target, queue=queue)", "@property def race(self) -> Race: \"\"\" Returns the race of", "UnitCommand: \"\"\" Orders Warpgate to warp in 'unit' at 'position'.", "\"\"\" return self(AbilityId.MOVE_MOVE, target=position, queue=queue) def scan_move(self, *args, **kwargs) ->", "UnitCommand: \"\"\" Orders a unit to gather minerals or gas.", "self._proto.shield_max == 0: return 0 return self._proto.shield / self._proto.shield_max @property", "return self._proto.is_burrowed @property def is_hallucination(self) -> bool: \"\"\" Returns True", "self._type_data.attributes @property def tech_alias(self) -> Optional[List[UnitTypeId]]: \"\"\" Building tech equality,", "import UnitTypeId from .position import Point2, Point3 from .unit_command import", "# TODO BATTLECRUISER doesnt have weapons in proto?! return bool(self._weapons)", "\"\"\" return IS_MECHANICAL in self._type_data.attributes @property def is_massive(self) -> bool:", "in self._weapons) return False @property_immutable_cache def ground_dps(self) -> Union[int, float]:", "'AbilityId.ATTACK' \"\"\" return self(AbilityId.SCAN_MOVE, *args, **kwargs) def hold_position(self, queue: bool", "to Point2. \"\"\" return self._proto.pos.x, self._proto.pos.y @property_immutable_cache def position(self) ->", "warnings from typing import Any, Dict, List, Optional, Set, Tuple,", "of the unit. Does not include upgrades \"\"\" return self._type_data._proto.armor", "will patrol in a circle. 
:param position: :param queue: \"\"\"", "queue=queue) def research(self, upgrade: UpgradeId, queue: bool = False) ->", "self(AbilityId.HARVEST_GATHER, target=target, queue=queue) def return_resource(self, target: Unit = None, queue:", "return self.tag == other.tag except: return False def __call__(self, ability,", "not include upgrades. \"\"\" if self.type_id == UNIT_BATTLECRUISER: return True", "int: \"\"\" Returns the owner of the unit. This is", "unit needs. \"\"\" return self._type_data.cargo_size @property def cargo_max(self) -> Union[float,", "UnitTypeId: \"\"\" UnitTypeId found in sc2/ids/unit_typeid. Caches all type_ids of", "the same as the starting point, the unit will patrol", "IS_MINE @property def is_enemy(self) -> bool: \"\"\" Checks if the", "on higher, not visible ground appear this way. \"\"\" return", "@property def is_active(self) -> bool: \"\"\" Checks if the unit", "import AbilityId from .ids.buff_id import BuffId from .ids.upgrade_id import UpgradeId", "the range against ground units. Does not include upgrades. \"\"\"", "def is_using_ability(self, abilities: Union[AbilityId, Set[AbilityId]]) -> bool: \"\"\" Check if", "the unit to return resource. Does not need a 'target'.", "weapons of the unit. \"\"\" try: return self._type_data._proto.weapons except: return", "return 6 if self.can_attack_air: weapon = next((weapon for weapon in", "about the cloak status of the unit.\"\"\" return self._proto.display_type ==", "\"\"\" return Point3.from_proto(self._proto.pos) def distance_to(self, p: Union[Unit, Point2, Point3]) ->", "self.orders[0].ability.id in abilities @property_immutable_cache def is_moving(self) -> bool: \"\"\" Checks", "if TYPE_CHECKING: from .bot_ai import BotAI from .game_data import AbilityData", "the ground, like queen creep tumor, ravager bile, HT storm", "gas remaining in a geyser. \"\"\" return self._proto.vespene_contents @property def", "Building type equality, e.g. 
FlyingOrbitalCommand is the same as OrbitalCommand", "return any(weapon.type in TARGET_GROUND for weapon in self._weapons) return False", "NOTE: Returns 0 for units without a timer bar. \"\"\"", "of the unit. Does not include shields. \"\"\" return self._proto.health_max", "__init__(self, ability: AbilityData, target, progress: float = None): \"\"\" :param", "return self._proto.is_selected @property def is_on_screen(self) -> bool: \"\"\" Checks if", "@property def has_cargo(self) -> bool: \"\"\" Checks if this unit", "is currently left in the unit. \"\"\" return self._proto.cargo_space_max -", "mine. Only works for own units. \"\"\" return self.is_using_ability(IS_GATHERING) @property_immutable_cache", "return self(self._bot_object._game_data.units[unit.value].creation_ability.id, target=position, queue=queue) def research(self, upgrade: UpgradeId, queue: bool", "a function that checks if this unit is facing another", "return self(AbilityId.ATTACK, target=target, queue=queue) def gather(self, target: Unit, queue: bool", "for weapon in self._weapons if weapon.type in TARGET_AIR), None) if", "yamato if ability_target_type in {Target.Unit.value, Target.PointOrUnit.value} and isinstance(target, Unit): return", "range [0,2π). 0 is in direction of x axis.\"\"\" return", "as a snapshot for the bot. 
Enemy buildings that have", "abilities @property_immutable_cache def is_moving(self) -> bool: \"\"\" Checks if the", "self.actions.append(SCV.build(COMMANDCENTER, position)) :param unit: :param position: :param queue: \"\"\" return", "if self.type_id == UNIT_ORACLE: return 4 if self.type_id == UNIT_BATTLECRUISER:", "have a little lower range than this formula if self.can_attack_ground", "b = weapon.damage_bonus[0] return (b.bonus, Attribute(b.attribute).name) else: return None @property", "abilities = {abilities} return self.orders[0].ability.id in abilities @property_immutable_cache def is_moving(self)", "return self._bot_object._game_data.unit_types[unit_type] @property_immutable_cache def _type_data(self) -> \"UnitTypeData\": \"\"\" Provides the", "hallucination or detected. \"\"\" return self._proto.is_hallucination @property def attack_upgrade_level(self) ->", "bool: \"\"\" Checks if the unit is a mineral field.", "PlanetaryFortress, Medivac, Nydus, Overlord or WarpPrism. \"\"\" return {Unit(unit, self._bot_object)", "or MULE to repair. :param repair_target: :param queue: \"\"\" return", "-> CloakState: \"\"\" Returns cloak state. See https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95 \"\"\" return", "\"\"\" return f\"Unit(name={self.name !r}, tag={self.tag})\" @property_immutable_cache def type_id(self) -> UnitTypeId:", "weapon.damage_bonus: b = weapon.damage_bonus[0] return (b.bonus, Attribute(b.attribute).name) else: return None", "if isinstance(p, Unit): return self._bot_object._distance_squared_unit_to_unit(self, p) ** 0.5 return self._bot_object.distance_math_hypot(self.position_tuple,", "@property def engaged_target_tag(self) -> int: # TODO What does this", "-> Optional[UnitTypeId]: \"\"\" Building type equality, e.g. FlyingOrbitalCommand is the", "unit is an SCV that is currently building. 
Only works", "if self._weapons: for weapon in self._weapons: if weapon.damage_bonus: b =", "currently gathering resources at a geyser or mining base.\"\"\" return", "== UNIT_BATTLECRUISER: return True if self._weapons: return any(weapon.type in TARGET_AIR", "bool: \"\"\" Checks if the unit has the 'mechanical' attribute.", "the upgrade level of the units armor. \"\"\" return self._proto.armor_upgrade_level", "Returns the name of the unit. \"\"\" return self._type_data.name @property", "except: return False def __call__(self, ability, target=None, queue: bool =", "maximum amount of energy the unit can have. Returns 0", "ability: :param target: :param progress: \"\"\" self.ability = ability self.target", "Returns the upgrade level of the units shield. # NOTE:", "/ self._proto.energy_max @property def is_snapshot(self) -> bool: \"\"\" Checks if", "return self.is_using_ability(AbilityId.MOVE) @property_immutable_cache def is_attacking(self) -> bool: \"\"\" Checks if", "IS_DETECTOR, UNIT_PHOTONCANNON, UNIT_COLOSSUS, ) from .data import Alliance, Attribute, CloakState,", "return self._type_data.has_minerals @property def is_vespene_geyser(self) -> bool: \"\"\" Checks if", "Checks if the unit is an SCV that is currently", "has any units loaded. \"\"\" return bool(self._proto.cargo_space_taken) @property def cargo_size(self)", "@property def is_vespene_geyser(self) -> bool: \"\"\" Checks if the unit", "this returns None \"\"\" return self._type_data.tech_alias @property def unit_alias(self) ->", "OrbitalCommand For flying OrbitalCommand, this returns UnitTypeId.OrbitalCommand For SCV, this", "UnitCommand warnings.simplefilter(\"once\") if TYPE_CHECKING: from .bot_ai import BotAI from .game_data", "health the unit has. Does not include shields. \"\"\" if", "0 for non-protoss units. \"\"\" if self._proto.shield_max == 0: return", "not visible ground appear this way. \"\"\" return self._proto.display_type ==", "bar. 
\"\"\" return self._proto.buff_duration_remain @property def buff_duration_max(self) -> int: \"\"\"", "0 @property_immutable_cache def can_attack_air(self) -> bool: \"\"\" Checks if the", "in self._proto.passengers} @property def cargo_used(self) -> Union[float, int]: \"\"\" Returns", "'mechanical' attribute. \"\"\" return IS_MECHANICAL in self._type_data.attributes @property def is_massive(self)", "of energy the unit has. Returns 0 for units without", ":param other_unit: :param angle_error: \"\"\" pass @property def radius(self) ->", "on that base.\"\"\" return self._proto.ideal_harvesters @property def surplus_harvesters(self) -> int:", "self(AbilityId.ATTACK, target=target, queue=queue) def gather(self, target: Unit, queue: bool =", "bool: \"\"\" Checks if the unit has the 'psionic' attribute.", "the unit is completed. \"\"\" return self.build_progress == 1 @property", "property_mutable_cache from .constants import ( transforming, IS_STRUCTURE, IS_LIGHT, IS_ARMORED, IS_BIOLOGICAL,", "\"\"\" return self._proto.tag @property def is_structure(self) -> bool: \"\"\" Checks", "how much cargo space is currently used in the unit.", "or Point2. Attacking a position will make the unit move", "unit.\"\"\" return self._proto.display_type == IS_VISIBLE @property def alliance(self) -> Alliance:", "return not self._proto.orders def is_using_ability(self, abilities: Union[AbilityId, Set[AbilityId]]) -> bool:", "proto.HasField(\"target_world_space_pos\") else proto.target_unit_tag), proto.progress, ) def __init__(self, ability: AbilityData, target,", "is_visible(self) -> bool: \"\"\" Checks if the unit is visible", "\"\"\" return self._proto.is_on_screen @property def is_blip(self) -> bool: \"\"\" Checks", "is_mineral_field(self) -> bool: \"\"\" Checks if the unit is a", "of shield points the unit has. 
Returns 0 for non-protoss", "Has to be completed in order to detect and Photoncannons", "target=None, queue: bool = False): return UnitCommand(ability, self, target=target, queue=queue)", "in IS_DETECTOR or self.type_id == UNIT_PHOTONCANNON and self.is_powered) @property def", "\"\"\" Returns the upgrade level of the units shield. #", "attached. \"\"\" return bool(self._proto.add_on_tag) @property_immutable_cache def add_on_land_position(self) -> Point2: \"\"\"", "return 4 if self.type_id == UNIT_BATTLECRUISER: return 6 if self.can_attack_ground:", "PlanetaryFortress, Medivac, Nydus, Overlord or WarpPrism. \"\"\" return {unit.tag for", "vision of the position of the unit! It does not", "to research 'upgrade'. Requires UpgradeId to be passed instead of", "float] = 0) -> bool: \"\"\" Checks if the target", "\"\"\" Returns completion in range [0,1].\"\"\" return self._proto.build_progress @property def", "return self._type_data._proto.armor @property def sight_range(self) -> Union[int, float]: \"\"\" Returns", "loaded. \"\"\" return bool(self._proto.cargo_space_taken) @property def cargo_size(self) -> Union[float, int]:", "+ self.radius + bonus_distance ) return False @property def facing(self)", "the tags of the units inside a Bunker, CommandCenter, PlanetaryFortress,", "energy_percentage(self) -> Union[int, float]: \"\"\" Returns the percentage of amount", "Returns the health of the unit. Does not include shields.", "the a list of the current orders. \"\"\" return [UnitOrder.from_proto(order,", "@property_immutable_cache def ground_dps(self) -> Union[int, float]: \"\"\" Returns the dps", "\"\"\" Returns the amount of minerals remaining in a mineral", "= 0 ) -> bool: \"\"\" Test if a unit", "-> UnitCommand: \"\"\" Orders a unit to patrol between position", "amount of frames of the visible timer bar. 
# NOTE:", "self._proto.passengers} @property_mutable_cache def passengers_tags(self) -> Set[int]: \"\"\" Returns the tags", ":param progress: \"\"\" self.ability = ability self.target = target self.progress", "to stop, but can start to move on its own", "is the same as the starting point, the unit will", "maximum health of the unit. Does not include shields. \"\"\"", "\"\"\" Orders a unit to patrol between position it has", "UnitTypeId.Lair] For SCV, this returns None \"\"\" return self._type_data.tech_alias @property", "research(self, upgrade: UpgradeId, queue: bool = False) -> UnitCommand: \"\"\"", "seven patrol points. If the last point is the same", "also need to be powered. \"\"\" return self.is_ready and (self.type_id", "an SCV or MULE to repair. :param repair_target: :param queue:", "else: return False return ( self._bot_object._distance_squared_unit_to_unit(self, target) <= (self.radius +", "self._proto.ideal_harvesters @property def surplus_harvesters(self) -> int: \"\"\" Returns a positive", "return (weapon.damage * weapon.attacks) / weapon.speed return 0 @property_immutable_cache def", "a unit to patrol between position it has when the", "def name(self) -> str: \"\"\" Returns the name of the", "casting abilities that target other units, like transfuse, feedback, snipe,", "import BuffId from .ids.upgrade_id import UpgradeId from .ids.unit_typeid import UnitTypeId", "@property def energy_percentage(self) -> Union[int, float]: \"\"\" Returns the percentage", "cloak status of the unit.\"\"\" return self._proto.display_type == IS_VISIBLE @property", "build(self, unit: UnitTypeId, position: Union[Point2, Point3] = None, queue: bool", "in range [0,2π). 
0 is in direction of x axis.\"\"\"", "def buffs(self) -> Set: \"\"\" Returns the set of current", "def build(self, unit: UnitTypeId, position: Union[Point2, Point3] = None, queue:", "from .cache import property_immutable_cache, property_mutable_cache from .constants import ( transforming,", "self.progress = progress def __repr__(self) -> str: return f\"UnitOrder({self.ability}, {self.target},", "NOT POPULATED FOR ENEMIES @property_mutable_cache def orders(self) -> List[UnitOrder]: \"\"\"", "@property def name(self) -> str: \"\"\" Returns the name of", "if self.can_attack: return self._proto.weapon_cooldown return -1 @property def engaged_target_tag(self) ->", "self._proto.is_selected @property def is_on_screen(self) -> bool: \"\"\" Checks if the", "if self._weapons: return any(weapon.type in TARGET_BOTH for weapon in self._weapons)", "Returns the 2d position of the unit as tuple without", "-> UnitCommand: \"\"\" Orders a unit to stop, but can", "building. Only works for own units. \"\"\" return self.is_using_ability(IS_CONSTRUCTING_SCV) @property_immutable_cache", "on the screen. \"\"\" return self._proto.is_on_screen @property def is_blip(self) ->", "-> UnitCommand: \"\"\" Orders unit to attack. Target can be", "return self._proto.cargo_space_max - self._proto.cargo_space_taken @property def assigned_harvesters(self) -> int: \"\"\"", "@property def is_enemy(self) -> bool: \"\"\" Checks if the unit", "Checks if the unit has the 'psionic' attribute. \"\"\" return", "bool = False) -> UnitCommand: \"\"\" Orders unit to build", "from .ids.upgrade_id import UpgradeId from .ids.unit_typeid import UnitTypeId from .position", "works for own units. \"\"\" return self.type_id in transforming and", "self.is_ready and (self.type_id in IS_DETECTOR or self.type_id == UNIT_PHOTONCANNON and", "= target self.progress = progress def __repr__(self) -> str: return", "extractors on empty geysers. 
\"\"\" return bool(self._proto.vespene_contents) @property def is_flying(self)", "'position'. :param unit: :param queue: \"\"\" normal_creation_ability = self._bot_object._game_data.units[unit.value].creation_ability.id return", "the unit has the 'psionic' attribute. \"\"\" return IS_PSIONIC in", "the position where a terran building has to land to", "worker is carrying vespene gas. \"\"\" return not IS_CARRYING_VESPENE.isdisjoint(self.buffs) @property_immutable_cache", "return self._proto.cloak is IS_REVEALED @property def can_be_attacked(self) -> bool: \"\"\"", "return IS_MECHANICAL in self._type_data.attributes @property def is_massive(self) -> bool: \"\"\"", "if weapon: return weapon.range return 0 @property_immutable_cache def bonus_damage(self): \"\"\"", "return self.is_using_ability(IS_COLLECTING) @property_immutable_cache def is_constructing_scv(self) -> bool: \"\"\" Checks if", "Checks if unit has an addon attached. \"\"\" return bool(self._proto.add_on_tag)", "self._proto.pos.x, self._proto.pos.y @property_immutable_cache def position(self) -> Point2: \"\"\" Returns the", "works for own units. \"\"\" return self.is_using_ability(IS_GATHERING) @property_immutable_cache def is_returning(self)", "upgrades or buffs. \"\"\" return self._type_data._proto.movement_speed @property def is_mineral_field(self) ->", "return weapon.range return 0 @property_immutable_cache def bonus_damage(self): \"\"\" Returns a", "shields. \"\"\" return self._proto.health_max @property def health_percentage(self) -> Union[int, float]:", "the unit. \"\"\" return Point2.from_proto(self._proto.pos) @property_immutable_cache def position3d(self) -> Point3:", "train another 'unit'. Usage: self.actions.append(COMMANDCENTER.train(SCV)) :param unit: :param queue: \"\"\"", "def detect_range(self) -> Union[int, float]: \"\"\" Returns the detection distance", "unit. \"\"\" return self._proto.add_on_tag @property def has_add_on(self) -> bool: \"\"\"", "the unit. 
\"\"\" try: return self._type_data._proto.weapons except: return None @property_immutable_cache", "queue: \"\"\" return self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id, queue=queue) def warp_in(self, unit: UnitTypeId, position:", "\"\"\" return not IS_CARRYING_MINERALS.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_vespene(self) -> bool: \"\"\"", ".data import Alliance, Attribute, CloakState, DisplayType, Race, TargetType, warpgate_abilities, TargetType,", ":param queue: \"\"\" return self(AbilityId.EFFECT_REPAIR, target=repair_target, queue=queue) def __hash__(self): return", "TYPE_CHECKING: from .bot_ai import BotAI from .game_data import AbilityData class", "works for own units. \"\"\" return self.is_using_ability(AbilityId.MOVE) @property_immutable_cache def is_attacking(self)", "to 'position'. Target can be a Unit (to follow that", "AbilityData, target, progress: float = None): \"\"\" :param ability: :param", "for unit. 3 for gas buildings, 2*n for n mineral", "return None @property_immutable_cache def can_attack(self) -> bool: \"\"\" Checks if", "bar. 
\"\"\" return self._proto.buff_duration_max # PROPERTIES BELOW THIS COMMENT ARE", "( self._bot_object._distance_pos_to_pos(self.position_tuple, target) <= cast_range + self.radius + bonus_distance )", "queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, queue=queue) def build(self, unit: UnitTypeId, position:", "if weapon.type in TARGET_GROUND), None) if weapon: return weapon.range return", "unit is returning from mineral field or vespene geyser to", "self._proto.is_on_screen @property def is_blip(self) -> bool: \"\"\" Checks if the", "self._proto.health_max @property def shield(self) -> Union[int, float]: \"\"\" Returns the", "@property_immutable_cache def is_gathering(self) -> bool: \"\"\" Checks if a unit", ":param p: \"\"\" if isinstance(p, Unit): return self._bot_object._distance_squared_unit_to_unit(self, p) **", "self.tag == other.tag except: return False def __call__(self, ability, target=None,", "if the unit is completed. \"\"\" return self.build_progress == 1", "is_revealed(self) -> bool: \"\"\" Checks if the unit is revealed.", "(sieged tank, planetary fortress etc.) have a little lower range", "unit to research 'upgrade'. Requires UpgradeId to be passed instead", "Checks if a geyser has any gas remaining. You can't", "( transforming, IS_STRUCTURE, IS_LIGHT, IS_ARMORED, IS_BIOLOGICAL, IS_MECHANICAL, IS_MASSIVE, IS_PSIONIC, UNIT_BATTLECRUISER,", "patch or a gas extraction building. :param target: :param queue:", "cloaked. \"\"\" return self._proto.cloak in IS_CLOAKED @property def is_revealed(self) ->", "detect_range(self) -> Union[int, float]: \"\"\" Returns the detection distance of", "0 for units without a weapon. \"\"\" return self._proto.attack_upgrade_level @property", "build another 'unit' at 'position'. 
Usage: self.actions.append(SCV.build(COMMANDCENTER, position)) :param unit:", "def is_structure(self) -> bool: \"\"\" Checks if the unit is", ".bot_ai import BotAI from .game_data import AbilityData class UnitOrder: @classmethod", "@property def add_on_tag(self) -> int: \"\"\" Returns the tag of", "'unit' at 'position'. :param unit: :param queue: \"\"\" normal_creation_ability =", "\"\"\" return self._type_data.name @property def race(self) -> Race: \"\"\" Returns", "the unit is a mineral field. \"\"\" return self._type_data.has_minerals @property", "@property_immutable_cache def is_returning(self) -> bool: \"\"\" Checks if a unit", "cargo space is currently used in the unit. Note that", "float]: \"\"\" Returns the health of the unit. Does not", "gas remaining. You can't build extractors on empty geysers. \"\"\"", "queue=queue) def build(self, unit: UnitTypeId, position: Union[Point2, Point3] = None,", "'Biological', 'Mechanical', 'Psionic', 'Massive', 'Structure'. \"\"\" # TODO: Consider units", "self._proto.buff_ids} @property_immutable_cache def is_carrying_minerals(self) -> bool: \"\"\" Checks if a", "for weapon in self._weapons) return False @property_immutable_cache def ground_dps(self) ->", "own units. \"\"\" return self.is_using_ability(IS_ATTACKING) @property_immutable_cache def is_patrolling(self) -> bool:", "to. \"\"\" return self._proto.alliance @property def is_mine(self) -> bool: \"\"\"", "unit. Does not include upgrades or buffs. \"\"\" return self._type_data._proto.movement_speed", "yet :param other_unit: :param angle_error: \"\"\" pass @property def radius(self)", "# Unit functions def has_buff(self, buff: BuffId) -> bool: \"\"\"", "units. 
\"\"\" if not self.orders: return False if isinstance(abilities, AbilityId):", "assigned_harvesters(self) -> int: \"\"\" Returns the number of workers currently", "self.is_idle @property def is_idle(self) -> bool: \"\"\" Checks if unit", "@property def sight_range(self) -> Union[int, float]: \"\"\" Returns the sight", "mineral patches on that base.\"\"\" return self._proto.ideal_harvesters @property def surplus_harvesters(self)", "your own hallucination or detected. \"\"\" return self._proto.is_hallucination @property def", "axis.\"\"\" return self._proto.facing # TODO: a function that checks if", "return self.is_idle @property def is_idle(self) -> bool: \"\"\" Checks if", "def has_cargo(self) -> bool: \"\"\" Checks if this unit has", "IS_CARRYING_RESOURCES.isdisjoint(self.buffs) @property def detect_range(self) -> Union[int, float]: \"\"\" Returns the", "OrbitalCommand, this returns UnitTypeId.OrbitalCommand For SCV, this returns None \"\"\"", "add_on_tag(self) -> int: \"\"\" Returns the tag of the addon", "2*n for n mineral patches on that base.\"\"\" return self._proto.ideal_harvesters", "of the unit. \"\"\" return self._type_data._proto.sight_range @property def movement_speed(self) ->", "import AbilityData class UnitOrder: @classmethod def from_proto(cls, proto, bot_object: BotAI):", "unit: UnitTypeId, position: Union[Point2, Point3]) -> UnitCommand: \"\"\" Orders Warpgate", "1 @property def cloak(self) -> CloakState: \"\"\" Returns cloak state.", "float]: \"\"\" Using the 2d distance between self and p.", "\"\"\" return self._proto.vespene_contents @property def has_vespene(self) -> bool: \"\"\" Checks", "is detected by a sensor tower. \"\"\" return self._proto.is_blip @property", "a Position) from the first order, returns None if the", "\"\"\" Returns the health of the unit. Does not include", "tank, planetary fortress etc.) 
have a little lower range than", "if self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE}: return True if self._weapons: return", "carrying (gold-)minerals. \"\"\" return not IS_CARRYING_MINERALS.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_vespene(self) ->", "unit to return resource. Does not need a 'target'. :param", "UNIT_COLOSSUS, ) from .data import Alliance, Attribute, CloakState, DisplayType, Race,", "etc.) have a little lower range than this formula if", "* weapon.attacks) / weapon.speed return 0 @property_immutable_cache def air_range(self) ->", "bool: \"\"\" Checks if the unit is cloaked. \"\"\" return", "self._proto.orders] @property_immutable_cache def order_target(self) -> Optional[Union[int, Point2]]: \"\"\" Returns the", "\"\"\" cast_range = self._bot_object._game_data.abilities[ability_id.value]._proto.cast_range assert cast_range > 0, f\"Checking for", "unit can attack at all. \"\"\" # TODO BATTLECRUISER doesnt", "range against ground units. Does not include upgrades. \"\"\" if", "include upgrades. \"\"\" if self.type_id == UNIT_ORACLE: return 4 if", "other units, like transfuse, feedback, snipe, yamato if ability_target_type in", "0 for units without energy. \"\"\" return self._proto.energy @property def", "unit in self._proto.passengers} @property def cargo_used(self) -> Union[float, int]: \"\"\"", "IS_MECHANICAL in self._type_data.attributes @property def is_massive(self) -> bool: \"\"\" Checks", "self._proto.is_burrowed @property def is_hallucination(self) -> bool: \"\"\" Returns True if", "Checks if the unit is a mineral field. \"\"\" return", "position: Union[Point2, Point3] = None, queue: bool = False) ->", "of energy the unit can have. 
Returns 0 for units", ":param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, queue=queue) def build(self, unit: UnitTypeId,", ") def __init__(self, ability: AbilityData, target, progress: float = None):", "COMMENT ARE NOT POPULATED FOR SNAPSHOTS @property def mineral_contents(self) ->", "return self._proto.weapon_cooldown return -1 @property def engaged_target_tag(self) -> int: #", "order to detect and Photoncannons also need to be powered.", "gather(self, target: Unit, queue: bool = False) -> UnitCommand: \"\"\"", "if the unit has the 'biological' attribute. \"\"\" return IS_BIOLOGICAL", "through research (like HT storm). :param ability_id: :param target: :param", "Checks if the unit is a structure. \"\"\" return IS_STRUCTURE", "Union[int, float]: \"\"\" Returns the maximum shield points the unit", "**kwargs) -> UnitCommand: \"\"\" Deprecated: This ability redirects to 'AbilityId.ATTACK'", "= next((weapon for weapon in self._weapons if weapon.type in TARGET_GROUND),", "to connect to addon \"\"\" return self.position.offset(Point2((-2.5, 0.5))) @property_mutable_cache def", "\"\"\" return self._type_data._proto.armor @property def sight_range(self) -> Union[int, float]: \"\"\"", "bar. # NOTE: Returns 0 for units without a timer", "Checks if the unit is moving. Only works for own", "\"\"\" Checks if the unit is cloaked. \"\"\" return self._proto.cloak", "-> Union[int, float]: \"\"\" Returns completion in range [0,1].\"\"\" return", "Function not completed yet :param other_unit: :param angle_error: \"\"\" pass", "@property def is_biological(self) -> bool: \"\"\" Checks if the unit", "self._proto.shield_max @property def shield_percentage(self) -> Union[int, float]: \"\"\" Returns the", "\"\"\" return self._proto.alliance == IS_MINE @property def is_enemy(self) -> bool:", "that some units take up more than one space. 
\"\"\"", "return buff in self.buffs def train(self, unit: UnitTypeId, queue: bool", "Optional, Set, Tuple, Union, TYPE_CHECKING from .cache import property_immutable_cache, property_mutable_cache", "bool: \"\"\" Checks if the unit can attack both ground", "IS_VISIBLE @property def alliance(self) -> Alliance: \"\"\" Returns the team", "this unit is facing another unit def is_facing_unit(self, other_unit: Unit,", "soon, please use is_idle instead\", DeprecationWarning, stacklevel=2) return self.is_idle @property", "a unit is able to cast an ability on the", "self._proto.is_flying or self.has_buff(BuffId.GRAVITONBEAM) @property def is_burrowed(self) -> bool: \"\"\" Checks", "def is_attacking(self) -> bool: \"\"\" Checks if the unit is", "{Unit(unit, self._bot_object) for unit in self._proto.passengers} @property_mutable_cache def passengers_tags(self) ->", "POPULATED FOR ENEMIES @property_mutable_cache def orders(self) -> List[UnitOrder]: \"\"\" Returns", "def race(self) -> Race: \"\"\" Returns the race of the", "first order, returns None if the unit is idle \"\"\"", "def is_psionic(self) -> bool: \"\"\" Checks if the unit has", "units (sieged tank, planetary fortress etc.) have a little lower", "\"\"\" Returns True if the unit is your own hallucination", "attack at all. \"\"\" # TODO BATTLECRUISER doesnt have weapons", "unit.position3d.distance_to(p) :param p: \"\"\" if isinstance(p, Unit): return self._bot_object._distance_squared_unit_to_unit(self, p)", "space is currently used in the unit. Note that some", "@property def position_tuple(self) -> Tuple[float, float]: \"\"\" Returns the 2d", "from_proto(cls, proto, bot_object: BotAI): return cls( bot_object._game_data.abilities[proto.ability_id], (proto.target_world_space_pos if proto.HasField(\"target_world_space_pos\")", "return self._proto.assigned_harvesters - self._proto.ideal_harvesters @property_immutable_cache def weapon_cooldown(self) -> Union[int, float]:", "CloakState: \"\"\" Returns cloak state. 
See https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95 \"\"\" return self._proto.cloak", "air units. Does not include upgrades. \"\"\" if self.type_id ==", "Orders unit to attack. Target can be a Unit or", "units attack. # NOTE: Returns 0 for units without a", "@property def tag(self) -> int: \"\"\" Returns the unique tag", "def __init__(self, proto_data, bot_object: BotAI): \"\"\" :param proto_data: :param bot_object:", "hostile. \"\"\" return self._proto.alliance == IS_ENEMY @property def owner_id(self) ->", "bool: \"\"\" Checks if the unit is visible for the", "return bool(self._weapons) or self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE} @property_immutable_cache def can_attack_both(self)", "-> UnitCommand: \"\"\" Orders a unit to stop moving. It", "-> bool: \"\"\" Checks if the unit is idle. \"\"\"", "warnings.warn(\"noqueue will be removed soon, please use is_idle instead\", DeprecationWarning,", "Union[int, float]: \"\"\" Returns the health of the unit. Does", "def position3d(self) -> Point3: \"\"\" Returns the 3d position of", "timer bar. \"\"\" return self._proto.buff_duration_remain @property def buff_duration_max(self) -> int:", "move there and attack everything on its way. :param target:", "Returns 0 for non-protoss units. \"\"\" return self._proto.shield_max @property def", "position it has when the command starts and the target", "NOTE: Returns 0 for units without a shield. 
\"\"\" return", "information about the cloak status of the unit.\"\"\" return self._proto.display_type", "self._proto.pos.y @property_immutable_cache def position(self) -> Point2: \"\"\" Returns the 2d", "def ideal_harvesters(self) -> int: \"\"\" Returns the ideal harverster count", "queue=queue) def return_resource(self, target: Unit = None, queue: bool =", "target without checking ability cooldown (like stalker blink) or if", "is_returning(self) -> bool: \"\"\" Checks if a unit is returning", "\"\"\" Provides the unit type data. \"\"\" return self._bot_object._game_data.units[self._proto.unit_type] @property", "is_massive(self) -> bool: \"\"\" Checks if the unit has the", "UnitCommand: \"\"\" Order an SCV or MULE to repair. :param", "energy. \"\"\" if self._proto.energy_max == 0: return 0 return self._proto.energy", "but can start to move on its own if it", "\"\"\" Checks if the unit is moving. Only works for", "== 0: return 0 return self._proto.energy / self._proto.energy_max @property def", "import Alliance, Attribute, CloakState, DisplayType, Race, TargetType, warpgate_abilities, TargetType, Target", "-> bool: \"\"\" Checks if the unit has the 'biological'", "if the unit can attack ground units. \"\"\" if self.type_id", "is a mineral field. \"\"\" return self._type_data.has_minerals @property def is_vespene_geyser(self)", "a negative int if it has too few mining.\"\"\" return", "return None @property def noqueue(self) -> bool: \"\"\" Checks if", "facing(self) -> Union[int, float]: \"\"\" Returns direction the unit is", "if weapon.type in TARGET_GROUND), None) if weapon: return (weapon.damage *", "flying. \"\"\" return self._proto.is_flying or self.has_buff(BuffId.GRAVITONBEAM) @property def is_burrowed(self) ->", "Attacking a position will make the unit move there and", ") return False @property def facing(self) -> Union[int, float]: \"\"\"", "for units without energy. 
\"\"\" return self._proto.energy_max @property def energy_percentage(self)", "0.5))) @property_mutable_cache def passengers(self) -> Set[Unit]: \"\"\" Returns the units", "have. Returns 0 for non-protoss units. \"\"\" return self._proto.shield_max @property", "Returns the amount of cargo space the unit needs. \"\"\"", "cast_range > 0, f\"Checking for an ability ({ability_id}) that has", "own units. \"\"\" if not self.orders: return False if isinstance(abilities,", "-> Alliance: \"\"\" Returns the team the unit belongs to.", "unit type. \"\"\" unit_type = self._proto.unit_type if unit_type not in", "if the unit is moving. Only works for own units.", "-> bool: \"\"\" Checks if the unit can attack at", "@property_immutable_cache def order_target(self) -> Optional[Union[int, Point2]]: \"\"\" Returns the target", "armor. \"\"\" return self._proto.armor_upgrade_level @property def shield_upgrade_level(self) -> int: \"\"\"", "the movement speed of the unit. Does not include upgrades", "Returns the time until the unit can fire again, returns", "self._bot_object._game_data.unit_types[unit_type] = UnitTypeId(unit_type) return self._bot_object._game_data.unit_types[unit_type] @property_immutable_cache def _type_data(self) -> \"UnitTypeData\":", "HT storm). :param ability_id: :param target: :param bonus_distance: \"\"\" cast_range", "Possible armor typs are: 'Light', 'Armored', 'Biological', 'Mechanical', 'Psionic', 'Massive',", "works for own units. \"\"\" return self.is_using_ability(IS_ATTACKING) @property_immutable_cache def is_patrolling(self)", "return self._type_data._proto.sight_range @property def movement_speed(self) -> Union[int, float]: \"\"\" Returns", "not include upgrades. 
\"\"\" if self.type_id == UNIT_ORACLE: return 4", "self._proto.energy_max @property def energy_percentage(self) -> Union[int, float]: \"\"\" Returns the", "def __repr__(self) -> str: \"\"\" Returns string of this form:", "\"\"\" return self._proto.radius @property def build_progress(self) -> Union[int, float]: \"\"\"", "or detected. \"\"\" return self._proto.is_hallucination @property def attack_upgrade_level(self) -> int:", "Optional[UnitTypeId]: \"\"\" Building type equality, e.g. FlyingOrbitalCommand is the same", "at all. Does not include upgrades. \"\"\" if self.type_id ==", "equality, e.g. FlyingOrbitalCommand is the same as OrbitalCommand For flying", "for unit in self._proto.passengers} @property_mutable_cache def passengers_tags(self) -> Set[int]: \"\"\"", "energy. \"\"\" return self._proto.energy_max @property def energy_percentage(self) -> Union[int, float]:", "@property_immutable_cache def can_attack_air(self) -> bool: \"\"\" Checks if the unit", "def engaged_target_tag(self) -> int: # TODO What does this do?", "facing as a float in range [0,2π). 0 is in", "False) -> UnitCommand: \"\"\" Orders unit to research 'upgrade'. Requires", "include upgrades \"\"\" return self._type_data._proto.armor @property def sight_range(self) -> Union[int,", "unit is powered by a pylon or warppism. \"\"\" return", "unit. 
This is a value of 1 or 2 in", "== 1 @property def cloak(self) -> CloakState: \"\"\" Returns cloak", "\"\"\" Checks if a worker or MULE is carrying (gold-)minerals.", "def patrol(self, position: Union[Point2, Point3], queue: bool = False) ->", "return self.is_using_ability(IS_CONSTRUCTING_SCV) @property_immutable_cache def is_transforming(self) -> bool: \"\"\" Checks if", "queen creep tumor, ravager bile, HT storm if ability_target_type in", "def stop(self, queue: bool = False) -> UnitCommand: \"\"\" Orders", "for weapon in self._weapons: if weapon.damage_bonus: b = weapon.damage_bonus[0] return", "self.is_using_ability(IS_REPAIRING) @property def add_on_tag(self) -> int: \"\"\" Returns the tag", "unit is using one of the given abilities. Only works", "if the unit is idle \"\"\" if self.orders: if isinstance(self.orders[0].target,", "is_hallucination(self) -> bool: \"\"\" Returns True if the unit is", "for the bot. NOTE: This means the bot has vision", "if the unit is an SCV that is currently building.", "return IS_ARMORED in self._type_data.attributes @property def is_biological(self) -> bool: \"\"\"", "unit is an SCV or MULE that is currently repairing.", "proto_data: :param bot_object: \"\"\" self._proto = proto_data self._bot_object = bot_object", "not include upgrades. \"\"\" if self.type_id == UNIT_BATTLECRUISER: return 6", "any(weapon.type in TARGET_GROUND for weapon in self._weapons) return False @property_immutable_cache", "the dps against ground units. Does not include upgrades. \"\"\"", "Overlord or WarpPrism. \"\"\" return {Unit(unit, self._bot_object) for unit in", "UnitTypeId, position: Union[Point2, Point3]) -> UnitCommand: \"\"\" Orders Warpgate to", "for weapon in self._weapons) return False @property_immutable_cache def can_attack_ground(self) ->", "bool: \"\"\" Checks if the unit is attacking. Only works", "the 'psionic' attribute. 
\"\"\" return IS_PSIONIC in self._type_data.attributes @property def", "add_on_land_position(self) -> Point2: \"\"\" If unit is addon (techlab or", "weapon.type in TARGET_AIR), None) if weapon: return weapon.range return 0", "def shield_percentage(self) -> Union[int, float]: \"\"\" Returns the percentage of", "int: \"\"\" Returns the maximum amount of frames of the", "all. \"\"\" # TODO BATTLECRUISER doesnt have weapons in proto?!", "of AbilityId. :param upgrade: :param queue: \"\"\" return self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id, queue=queue)", "Union[Point2, Point3]) -> UnitCommand: \"\"\" Orders Warpgate to warp in", "-> Union[int, float]: \"\"\" Returns the detection distance of the", "units. \"\"\" if self.type_id in {UNIT_BATTLECRUISER, UNIT_ORACLE}: return True if", "Union[float, int]: \"\"\" Returns the amount of cargo space the", "the upgrade level of the units attack. # NOTE: Returns", "\"\"\" Returns the unique tag of the unit. \"\"\" return", "self._proto.is_hallucination @property def attack_upgrade_level(self) -> int: \"\"\" Returns the upgrade", "bool: \"\"\" Checks if the unit has the 'armored' attribute.", "the maximum amount of frames of the visible timer bar.", "of the unit. \"\"\" return Point2.from_proto(self._proto.pos) @property_immutable_cache def position3d(self) ->", "\"\"\" Checks if the unit can attack both ground and", "+ self.radius + target.radius + bonus_distance) ** 2 ) #", "self._proto.add_on_tag @property def has_add_on(self) -> bool: \"\"\" Checks if unit", "the target's radius when calculating distance to target. :param target:", "return self._proto.add_on_tag @property def has_add_on(self) -> bool: \"\"\" Checks if", "Union[int, float]: \"\"\" Returns the armor of the unit. Does", "self.can_attack: return self._proto.weapon_cooldown return -1 @property def engaged_target_tag(self) -> int:", "appear this way. 
\"\"\" return self._proto.display_type == IS_SNAPSHOT @property def", "\"\"\" Checks if the unit is currently training or researching.", "-> Union[int, float]: \"\"\" Returns the shield points the unit", "def is_hallucination(self) -> bool: \"\"\" Returns True if the unit", "that is currently repairing. Only works for own units. \"\"\"", "return 0 @property_immutable_cache def ground_range(self) -> Union[int, float]: \"\"\" Returns", "float]: \"\"\" Returns the range against ground units. Does not", "self.can_attack_ground and not target.is_flying: unit_attack_range = self.ground_range elif self.can_attack_air and", "return bool(self._proto.add_on_tag) @property_immutable_cache def add_on_land_position(self) -> Point2: \"\"\" If unit", "not move until it gets new orders. :param queue: \"\"\"", "'Armored', 'Biological', 'Mechanical', 'Psionic', 'Massive', 'Structure'. \"\"\" # TODO: Consider", "unit is moving. Only works for own units. \"\"\" return", "return {Unit(unit, self._bot_object) for unit in self._proto.passengers} @property_mutable_cache def passengers_tags(self)", "weapon in self._weapons if weapon.type in TARGET_AIR), None) if weapon:", "(like stalker blink) or if ability is made available through", "bool: \"\"\" Checks if the unit is powered by a", "def ground_range(self) -> Union[int, float]: \"\"\" Returns the range against", "def movement_speed(self) -> Union[int, float]: \"\"\" Returns the movement speed", "returning. Only works for own units. \"\"\" return self.is_using_ability(IS_COLLECTING) @property_immutable_cache", "Target.PointOrUnit.value} and isinstance( target, (Point2, tuple) ): return ( self._bot_object._distance_pos_to_pos(self.position_tuple,", "Target from .ids.ability_id import AbilityId from .ids.buff_id import BuffId from", "stalker blink) or if ability is made available through research", "\"\"\" Checks if the unit is burrowed. 
\"\"\" return self._proto.is_burrowed", "IS_CARRYING_MINERALS.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_vespene(self) -> bool: \"\"\" Checks if a", "\"\"\" Returns the upgrade level of the units armor. \"\"\"", "works for own units. \"\"\" return self.is_using_ability(IS_CONSTRUCTING_SCV) @property_immutable_cache def is_transforming(self)", "def is_detector(self) -> bool: \"\"\" Checks if the unit is", "another 'unit'. Usage: self.actions.append(COMMANDCENTER.train(SCV)) :param unit: :param queue: \"\"\" return", "SCV or MULE to repair. :param repair_target: :param queue: \"\"\"", "in self._bot_object._game_data.unit_types: self._bot_object._game_data.unit_types[unit_type] = UnitTypeId(unit_type) return self._bot_object._game_data.unit_types[unit_type] @property_immutable_cache def _type_data(self)", "than this formula if self.can_attack_ground and not target.is_flying: unit_attack_range =", "BotAI from .game_data import AbilityData class UnitOrder: @classmethod def from_proto(cls,", "self._type_data.unit_alias @property_immutable_cache def _weapons(self): \"\"\" Returns the weapons of the", "-> bool: \"\"\" Checks if the unit is an SCV", "AbilityId from .ids.buff_id import BuffId from .ids.upgrade_id import UpgradeId from", "@property def is_blip(self) -> bool: \"\"\" Checks if the unit", ":param unit: :param queue: \"\"\" return self(self._bot_object._game_data.units[unit.value].creation_ability.id, queue=queue) def build(self,", "False) -> UnitCommand: \"\"\" Orders a unit to patrol between", "\"\"\" Returns the movement speed of the unit. Does not", "a geyser. \"\"\" return self._proto.vespene_contents @property def has_vespene(self) -> bool:", "units take up more than one space. \"\"\" return self._proto.cargo_space_taken", "-> UnitCommand: \"\"\" Order an SCV or MULE to repair.", "other friendly units need the space. :param queue: \"\"\" return", "circle. 
:param position: :param queue: \"\"\" return self(AbilityId.PATROL, target=position, queue=queue)", "type'. Possible armor typs are: 'Light', 'Armored', 'Biological', 'Mechanical', 'Psionic',", "@property_immutable_cache def is_moving(self) -> bool: \"\"\" Checks if the unit", "patrolling. Only works for own units. \"\"\" return self.is_using_ability(IS_PATROLLING) @property_immutable_cache", "air units. Does not include upgrades. \"\"\" if self.can_attack_air: weapon", "\"\"\" Orders the unit to return resource. Does not need", "f\"Unit(name={self.name !r}, tag={self.tag})\" @property_immutable_cache def type_id(self) -> UnitTypeId: \"\"\" UnitTypeId", "if the unit has the 'massive' attribute. \"\"\" return IS_MASSIVE", "UpgradeId from .ids.unit_typeid import UnitTypeId from .position import Point2, Point3", "Returns the amount of gas remaining in a geyser. \"\"\"", "to be powered. \"\"\" return self.is_ready and (self.type_id in IS_DETECTOR", "\"\"\" Returns how much cargo space is currently left in", "{} def __repr__(self) -> str: \"\"\" Returns string of this", "-> int: \"\"\" Returns the unique tag of the unit.", "return weapon.range return 0 @property_immutable_cache def can_attack_air(self) -> bool: \"\"\"", "PROPERTIES BELOW THIS COMMENT ARE NOT POPULATED FOR SNAPSHOTS @property", "from typing import Any, Dict, List, Optional, Set, Tuple, Union,", "position will make the unit move there and attack everything", "if self._proto.health_max == 0: return 0 return self._proto.health / self._proto.health_max", "move(self, position: Union[Point2, Point3], queue: bool = False) -> UnitCommand:", "IS_MASSIVE, IS_PSIONIC, UNIT_BATTLECRUISER, UNIT_ORACLE, TARGET_GROUND, TARGET_AIR, TARGET_BOTH, IS_SNAPSHOT, IS_VISIBLE, IS_MINE,", "> 0, f\"Checking for an ability ({ability_id}) that has no", "the unit has the 'biological' attribute. 
\"\"\" return IS_BIOLOGICAL in", "and (target.is_flying or target.type_id == UNIT_COLOSSUS): unit_attack_range = self.air_range else:", "\"\"\" self.ability = ability self.target = target self.progress = progress", "/ self._proto.shield_max @property def energy(self) -> Union[int, float]: \"\"\" Returns", "an SCV or MULE that is currently repairing. Only works", "position of the unit as tuple without conversion to Point2.", "self._bot_object.distance_math_hypot(self.position_tuple, p) def target_in_range(self, target: Unit, bonus_distance: Union[int, float] =", "moving. Only works for own units. \"\"\" return self.is_using_ability(AbilityId.MOVE) @property_immutable_cache", "self._weapons: return any(weapon.type in TARGET_BOTH for weapon in self._weapons) return", "the percentage of health the unit has. Does not include", "unit_attack_range + bonus_distance) ** 2 ) def in_ability_cast_range( self, ability_id:", "for own units. \"\"\" return self.is_using_ability(IS_PATROLLING) @property_immutable_cache def is_gathering(self) ->", "unit as tuple without conversion to Point2. \"\"\" return self._proto.pos.x,", "unit. 3 for gas buildings, 2*n for n mineral patches", "== 0: return 0 return self._proto.health / self._proto.health_max @property def", "\"\"\" return self(AbilityId.SCAN_MOVE, *args, **kwargs) def hold_position(self, queue: bool =", "** 0.5 return self._bot_object.distance_math_hypot(self.position_tuple, p) def target_in_range(self, target: Unit, bonus_distance:", "return (b.bonus, Attribute(b.attribute).name) else: return None @property def armor(self) ->", "the unit can attack at all. \"\"\" # TODO BATTLECRUISER", "resource. Does not need a 'target'. :param target: :param queue:", "the target position. Can be queued up to seven patrol", "next((weapon for weapon in self._weapons if weapon.type in TARGET_AIR), None)", "if a unit is returning from mineral field or vespene", "Returns 0 for units without a shield. 
\"\"\" return self._proto.shield_upgrade_level", "-> Union[float, int]: \"\"\" Returns how much cargo space is", "the units armor. \"\"\" return self._proto.armor_upgrade_level @property def shield_upgrade_level(self) ->", "has the 'armored' attribute. \"\"\" return IS_ARMORED in self._type_data.attributes @property", "shield(self) -> Union[int, float]: \"\"\" Returns the shield points the", "-> bool: \"\"\" Checks if a unit is returning from", "def warp_in(self, unit: UnitTypeId, position: Union[Point2, Point3]) -> UnitCommand: \"\"\"", "in a mineral field. \"\"\" return self._proto.mineral_contents @property def vespene_contents(self)", "gas. \"\"\" return not IS_CARRYING_VESPENE.isdisjoint(self.buffs) @property_immutable_cache def is_carrying_resource(self) -> bool:", "and not target.is_flying: unit_attack_range = self.ground_range elif self.can_attack_air and (target.is_flying", "to stop moving. It will not move until it gets", "p: Union[Unit, Point2, Point3]) -> Union[int, float]: \"\"\" Using the", "in a two player game. \"\"\" return self._proto.owner @property def", "its way to a mineral field or vespene geyser to", "\"\"\" return self._proto.cargo_space_max @property def cargo_left(self) -> Union[float, int]: \"\"\"", "self._proto.health / self._proto.health_max @property def shield(self) -> Union[int, float]: \"\"\"", ":param bot_object: \"\"\" self._proto = proto_data self._bot_object = bot_object #", "range against air units. Does not include upgrades. \"\"\" if", "\"\"\" return self(self._bot_object._game_data.upgrades[upgrade.value].research_ability.id, queue=queue) def warp_in(self, unit: UnitTypeId, position: Union[Point2,", "bot_object: BotAI): \"\"\" :param proto_data: :param bot_object: \"\"\" self._proto =", ":param angle_error: \"\"\" pass @property def radius(self) -> Union[int, float]:", "return 0 @property_immutable_cache def can_attack_air(self) -> bool: \"\"\" Checks if", "Point2. 
\"\"\" return self._proto.pos.x, self._proto.pos.y @property_immutable_cache def position(self) -> Point2:", "is a structure. \"\"\" return IS_STRUCTURE in self._type_data.attributes @property def", "def is_gathering(self) -> bool: \"\"\" Checks if a unit is", "type equality, e.g. FlyingOrbitalCommand is the same as OrbitalCommand For", "Alliance: \"\"\" Returns the team the unit belongs to. \"\"\"", "\"\"\" return {unit.tag for unit in self._proto.passengers} @property def cargo_used(self)", "return self(AbilityId.PATROL, target=position, queue=queue) def repair(self, repair_target: Unit, queue: bool", "= progress def __repr__(self) -> str: return f\"UnitOrder({self.ability}, {self.target}, {self.progress})\"", "-> bool: \"\"\" Checks if the unit is cloaked. \"\"\"", "bool(self._proto.vespene_contents) @property def is_flying(self) -> bool: \"\"\" Checks if the", "unit. \"\"\" return self._proto.cargo_space_max - self._proto.cargo_space_taken @property def assigned_harvesters(self) ->", "\"\"\" assert isinstance(buff, BuffId), f\"{buff} is no BuffId\" return buff", "float]: \"\"\" Returns the dps against ground units. Does not", "without a timer bar. \"\"\" return self._proto.buff_duration_max # PROPERTIES BELOW", "\"\"\" Returns cloak state. See https://github.com/Blizzard/s2client-api/blob/d9ba0a33d6ce9d233c2a4ee988360c188fbe9dbf/include/sc2api/sc2_unit.h#L95 \"\"\" return self._proto.cloak @property", "-> bool: \"\"\" Checks if the unit is revealed or", "has the 'light' attribute. \"\"\" return IS_LIGHT in self._type_data.attributes @property", "-> bool: \"\"\" Checks if a worker is carrying vespene", "if unit has an addon attached. \"\"\" return bool(self._proto.add_on_tag) @property_immutable_cache", "in the unit. \"\"\" return self._proto.cargo_space_max - self._proto.cargo_space_taken @property def", "if the unit is hostile. 
\"\"\" return self._proto.alliance == IS_ENEMY", "2 ) # For casting abilities on the ground, like", "def passengers_tags(self) -> Set[int]: \"\"\" Returns the tags of the", "a little lower range than this formula if self.can_attack_ground and", "Checks if the unit can attack both ground and air", "\"\"\" return self._proto.shield_upgrade_level @property def buff_duration_remain(self) -> int: \"\"\" Returns", "bool: \"\"\" Checks if a unit is patrolling. Only works", "works for own units. \"\"\" return self.is_using_ability(IS_COLLECTING) @property_immutable_cache def is_constructing_scv(self)", "Checks if the unit has the 'armored' attribute. \"\"\" return", "self(AbilityId.STOP, queue=queue) def patrol(self, position: Union[Point2, Point3], queue: bool =", "def __init__(self, ability: AbilityData, target, progress: float = None): \"\"\"", "Returns the range against ground units. Does not include upgrades.", "Returns the maximum shield points the unit can have. Returns", "6 if self.can_attack_ground: weapon = next((weapon for weapon in self._weapons", "Returns a positive int if unit has too many harvesters", "UNIT_BATTLECRUISER: return True if self._weapons: return any(weapon.type in TARGET_AIR for", "None if the unit is idle \"\"\" if self.orders: if", "{self.progress})\" class Unit: def __init__(self, proto_data, bot_object: BotAI): \"\"\" :param", "@property def is_idle(self) -> bool: \"\"\" Checks if unit is", "queue=queue) def __hash__(self): return self.tag def __eq__(self, other): try: return", "\"\"\" Checks if the unit can air attack at all.", "'Massive', 'Structure'. \"\"\" # TODO: Consider units with ability attacks", "position3d(self) -> Point3: \"\"\" Returns the 3d position of the", "\"\"\" Returns the name of the unit. \"\"\" return self._type_data.name", "the addon of unit. \"\"\" return self._proto.add_on_tag @property def has_add_on(self)", "\"\"\" Checks if the unit is completed. 
\"\"\" return self.build_progress", "<= (cast_range + self.radius + target.radius + bonus_distance) ** 2", "DisplayType, Race, TargetType, warpgate_abilities, TargetType, Target from .ids.ability_id import AbilityId", "shield_max(self) -> Union[int, float]: \"\"\" Returns the maximum shield points", "\"\"\" return self._proto.add_on_tag @property def has_add_on(self) -> bool: \"\"\" Checks", "\"\"\" if self._proto.shield_max == 0: return 0 return self._proto.shield /", "= False) -> UnitCommand: \"\"\" Orders the unit to return", "points the unit can have. Returns 0 for non-protoss units.", "a mineral patch or a gas extraction building. :param target:", "if the unit is only available as a snapshot for" ]
[]
[ "= params.sample_time self.seed = seed self._last_CGM = 0 @classmethod def", "+ next(self._noise_generator) CGM = max(CGM, self._params[\"min\"]) CGM = min(CGM, self._params[\"max\"])", "seed(self): return self._seed @seed.setter def seed(self, seed): self._seed = seed", "CGMNoiseGenerator from .noise_gen import CGMNoise import pandas as pd import", "logger = logging.getLogger(__name__) class CGMSensor(object): def __init__(self, params, seed=None): self._params", "self._noise_generator = CGMNoise(self._params, seed=self.seed) self._last_CGM = 0 if __name__ ==", "= logging.getLogger(__name__) class CGMSensor(object): def __init__(self, params, seed=None): self._params =", "sensor_para_file, **kwargs): sensor_params = pd.read_csv(sensor_para_file) params = sensor_params.loc[sensor_params.Name == name].squeeze()", "params.sample_time self.seed = seed self._last_CGM = 0 @classmethod def withName(cls,", "= sensor_params.loc[sensor_params.Name == name].squeeze() return cls(params, **kwargs) def measure(self, patient):", "self.sample_time == 0: BG = patient.observation.Gsub CGM = BG +", "= CGM return CGM # Zero-Order Hold return self._last_CGM @property", "return self._seed @seed.setter def seed(self, seed): self._seed = seed self._noise_generator", "seed(self, seed): self._seed = seed self._noise_generator = CGMNoise(self._params, seed=seed) def", "def seed(self, seed): self._seed = seed self._noise_generator = CGMNoise(self._params, seed=seed)", "if patient.t % self.sample_time == 0: BG = patient.observation.Gsub CGM", "0 @classmethod def withName(cls, name, sensor_para_file, **kwargs): sensor_params = pd.read_csv(sensor_para_file)", "= max(CGM, self._params[\"min\"]) CGM = min(CGM, self._params[\"max\"]) self._last_CGM = CGM", "% self.sample_time == 0: BG = patient.observation.Gsub CGM = BG", "pd import logging logger = logging.getLogger(__name__) class CGMSensor(object): def __init__(self,", "import logging logger = logging.getLogger(__name__) class 
CGMSensor(object): def __init__(self, params,", "name].squeeze() return cls(params, **kwargs) def measure(self, patient): if patient.t %", "logger.debug('Resetting CGM sensor ...') self._noise_generator = CGMNoise(self._params, seed=self.seed) self._last_CGM =", "self._params[\"max\"]) self._last_CGM = CGM return CGM # Zero-Order Hold return", "as pd import logging logger = logging.getLogger(__name__) class CGMSensor(object): def", "from .noise_gen import CGMNoise import pandas as pd import logging", "seed=None): self._params = params self.name = params.Name self.sample_time = params.sample_time", "= BG + next(self._noise_generator) CGM = max(CGM, self._params[\"min\"]) CGM =", "CGMNoise(self._params, seed=self.seed) self._last_CGM = 0 if __name__ == '__main__': pass", "CGM = max(CGM, self._params[\"min\"]) CGM = min(CGM, self._params[\"max\"]) self._last_CGM =", "0: BG = patient.observation.Gsub CGM = BG + next(self._noise_generator) CGM", "Zero-Order Hold return self._last_CGM @property def seed(self): return self._seed @seed.setter", "...') self._noise_generator = CGMNoise(self._params, seed=self.seed) self._last_CGM = 0 if __name__", "= 0 @classmethod def withName(cls, name, sensor_para_file, **kwargs): sensor_params =", "params.Name self.sample_time = params.sample_time self.seed = seed self._last_CGM = 0", "self._noise_generator = CGMNoise(self._params, seed=seed) def reset(self): logger.debug('Resetting CGM sensor ...')", "return cls(params, **kwargs) def measure(self, patient): if patient.t % self.sample_time", "self.name = params.Name self.sample_time = params.sample_time self.seed = seed self._last_CGM", "self._last_CGM = 0 @classmethod def withName(cls, name, sensor_para_file, **kwargs): sensor_params", "= params.Name self.sample_time = params.sample_time self.seed = seed self._last_CGM =", "= min(CGM, self._params[\"max\"]) self._last_CGM = CGM return CGM # Zero-Order", "def seed(self): return self._seed @seed.setter def seed(self, seed): self._seed =", 
".noise_gen import CGMNoiseGenerator from .noise_gen import CGMNoise import pandas as", "sensor_params = pd.read_csv(sensor_para_file) params = sensor_params.loc[sensor_params.Name == name].squeeze() return cls(params,", "CGM return CGM # Zero-Order Hold return self._last_CGM @property def", "**kwargs): sensor_params = pd.read_csv(sensor_para_file) params = sensor_params.loc[sensor_params.Name == name].squeeze() return", "CGM sensor ...') self._noise_generator = CGMNoise(self._params, seed=self.seed) self._last_CGM = 0", "def reset(self): logger.debug('Resetting CGM sensor ...') self._noise_generator = CGMNoise(self._params, seed=self.seed)", "**kwargs) def measure(self, patient): if patient.t % self.sample_time == 0:", "@classmethod def withName(cls, name, sensor_para_file, **kwargs): sensor_params = pd.read_csv(sensor_para_file) params", "CGMSensor(object): def __init__(self, params, seed=None): self._params = params self.name =", "max(CGM, self._params[\"min\"]) CGM = min(CGM, self._params[\"max\"]) self._last_CGM = CGM return", "def measure(self, patient): if patient.t % self.sample_time == 0: BG", "= pd.read_csv(sensor_para_file) params = sensor_params.loc[sensor_params.Name == name].squeeze() return cls(params, **kwargs)", "seed self._noise_generator = CGMNoise(self._params, seed=seed) def reset(self): logger.debug('Resetting CGM sensor", "return self._last_CGM @property def seed(self): return self._seed @seed.setter def seed(self,", "BG + next(self._noise_generator) CGM = max(CGM, self._params[\"min\"]) CGM = min(CGM,", "import CGMNoise import pandas as pd import logging logger =", "<gh_stars>0 # from .noise_gen import CGMNoiseGenerator from .noise_gen import CGMNoise", "# Zero-Order Hold return self._last_CGM @property def seed(self): return self._seed", "CGMNoise import pandas as pd import logging logger = logging.getLogger(__name__)", "seed): self._seed = seed self._noise_generator = CGMNoise(self._params, seed=seed) def reset(self):", "def __init__(self, 
params, seed=None): self._params = params self.name = params.Name", "self._last_CGM = CGM return CGM # Zero-Order Hold return self._last_CGM", "@seed.setter def seed(self, seed): self._seed = seed self._noise_generator = CGMNoise(self._params,", "Hold return self._last_CGM @property def seed(self): return self._seed @seed.setter def", "pd.read_csv(sensor_para_file) params = sensor_params.loc[sensor_params.Name == name].squeeze() return cls(params, **kwargs) def", "self._last_CGM @property def seed(self): return self._seed @seed.setter def seed(self, seed):", "self._seed = seed self._noise_generator = CGMNoise(self._params, seed=seed) def reset(self): logger.debug('Resetting", "params = sensor_params.loc[sensor_params.Name == name].squeeze() return cls(params, **kwargs) def measure(self,", "import CGMNoiseGenerator from .noise_gen import CGMNoise import pandas as pd", "logging logger = logging.getLogger(__name__) class CGMSensor(object): def __init__(self, params, seed=None):", "self._params = params self.name = params.Name self.sample_time = params.sample_time self.seed", "def withName(cls, name, sensor_para_file, **kwargs): sensor_params = pd.read_csv(sensor_para_file) params =", "CGMNoise(self._params, seed=seed) def reset(self): logger.debug('Resetting CGM sensor ...') self._noise_generator =", "measure(self, patient): if patient.t % self.sample_time == 0: BG =", ".noise_gen import CGMNoise import pandas as pd import logging logger", "withName(cls, name, sensor_para_file, **kwargs): sensor_params = pd.read_csv(sensor_para_file) params = sensor_params.loc[sensor_params.Name", "logging.getLogger(__name__) class CGMSensor(object): def __init__(self, params, seed=None): self._params = params", "sensor ...') self._noise_generator = CGMNoise(self._params, seed=self.seed) self._last_CGM = 0 if", "seed self._last_CGM = 0 @classmethod def withName(cls, name, sensor_para_file, **kwargs):", "__init__(self, params, seed=None): self._params = params self.name = params.Name 
self.sample_time", "CGM # Zero-Order Hold return self._last_CGM @property def seed(self): return", "self.seed = seed self._last_CGM = 0 @classmethod def withName(cls, name,", "= seed self._noise_generator = CGMNoise(self._params, seed=seed) def reset(self): logger.debug('Resetting CGM", "= patient.observation.Gsub CGM = BG + next(self._noise_generator) CGM = max(CGM,", "params self.name = params.Name self.sample_time = params.sample_time self.seed = seed", "BG = patient.observation.Gsub CGM = BG + next(self._noise_generator) CGM =", "min(CGM, self._params[\"max\"]) self._last_CGM = CGM return CGM # Zero-Order Hold", "pandas as pd import logging logger = logging.getLogger(__name__) class CGMSensor(object):", "next(self._noise_generator) CGM = max(CGM, self._params[\"min\"]) CGM = min(CGM, self._params[\"max\"]) self._last_CGM", "self._params[\"min\"]) CGM = min(CGM, self._params[\"max\"]) self._last_CGM = CGM return CGM", "params, seed=None): self._params = params self.name = params.Name self.sample_time =", "patient): if patient.t % self.sample_time == 0: BG = patient.observation.Gsub", "= CGMNoise(self._params, seed=self.seed) self._last_CGM = 0 if __name__ == '__main__':", "self.sample_time = params.sample_time self.seed = seed self._last_CGM = 0 @classmethod", "patient.t % self.sample_time == 0: BG = patient.observation.Gsub CGM =", "= params self.name = params.Name self.sample_time = params.sample_time self.seed =", "class CGMSensor(object): def __init__(self, params, seed=None): self._params = params self.name", "self._seed @seed.setter def seed(self, seed): self._seed = seed self._noise_generator =", "# from .noise_gen import CGMNoiseGenerator from .noise_gen import CGMNoise import", "CGM = BG + next(self._noise_generator) CGM = max(CGM, self._params[\"min\"]) CGM", "cls(params, **kwargs) def measure(self, patient): if patient.t % self.sample_time ==", "sensor_params.loc[sensor_params.Name == name].squeeze() return cls(params, **kwargs) def measure(self, 
patient): if", "CGM = min(CGM, self._params[\"max\"]) self._last_CGM = CGM return CGM #", "@property def seed(self): return self._seed @seed.setter def seed(self, seed): self._seed", "== 0: BG = patient.observation.Gsub CGM = BG + next(self._noise_generator)", "= seed self._last_CGM = 0 @classmethod def withName(cls, name, sensor_para_file,", "import pandas as pd import logging logger = logging.getLogger(__name__) class", "patient.observation.Gsub CGM = BG + next(self._noise_generator) CGM = max(CGM, self._params[\"min\"])", "name, sensor_para_file, **kwargs): sensor_params = pd.read_csv(sensor_para_file) params = sensor_params.loc[sensor_params.Name ==", "seed=seed) def reset(self): logger.debug('Resetting CGM sensor ...') self._noise_generator = CGMNoise(self._params,", "return CGM # Zero-Order Hold return self._last_CGM @property def seed(self):", "from .noise_gen import CGMNoiseGenerator from .noise_gen import CGMNoise import pandas", "== name].squeeze() return cls(params, **kwargs) def measure(self, patient): if patient.t", "reset(self): logger.debug('Resetting CGM sensor ...') self._noise_generator = CGMNoise(self._params, seed=self.seed) self._last_CGM", "= CGMNoise(self._params, seed=seed) def reset(self): logger.debug('Resetting CGM sensor ...') self._noise_generator" ]
[ "support was added in 2.2.0') depends_on('fastjet', when='@2.0.0:') depends_on('rivet', when='@2.0.3:') depends_on('boost',", "+= ['--with-hepmcversion=' + self.spec.variants['hepmc'].value] if self.spec.satisfies('@2.0.0:'): args += ['--with-fastjet=' +", "if self.spec.satisfies('@:2.1.999'): args += ['--with-boost=' + self.spec['boost'].prefix] args += ['CFLAGS=-O2',", "sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c') # version('2.1.2', sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49') version('2.1.1', sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281') # version('2.1.0', sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39') #", "when='hepmc=2') depends_on('hepmc3', when='hepmc=3') conflicts('hepmc=3', when='@:2.1.999', msg='HepMC3 support was added in", "args += ['--with-hepmcversion=' + self.spec.variants['hepmc'].value] if self.spec.satisfies('@2.0.0:'): args += ['--with-fastjet='", "when='@2.0.0:') depends_on('rivet', when='@2.0.3:') depends_on('boost', when='@2.1.1:') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool',", "# version('1.7.0', sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312') # version('1.6.1', sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f') # version('1.6.0', sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680') #", "self.spec['lhapdf'].prefix] else: args += ['--with-lhapdf=' + self.spec['lhapdf'].prefix] if self.spec.satisfies('hepmc=2'): args", "+ self.spec['fastjet'].prefix] if self.spec.satisfies('@2.0.3:'): args += ['--with-rivet=' + self.spec['rivet'].prefix] if", "['--with-boost=' + self.spec['boost'].prefix] args += ['CFLAGS=-O2', 'CXXFLAGS=-O2', 'FFLAGS=-O2'] return args", "for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack", "depends_on('libtool', type='build') depends_on('m4', type='build') variant('hepmc', default='2', values=('2', '3'), description='HepMC interface", "\"http://home.thep.lu.se/~leif/ThePEG/\" url = \"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\" # The commented out versions exist,", "sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562') # version('1.8.0', sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037') # version('1.7.3', sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187') # version('1.7.2', sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013')", "# version('1.5.0', sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2') # version('1.4.2', sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c') # version('1.4.1', sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a') #", "homepage = \"http://home.thep.lu.se/~leif/ThePEG/\" url = \"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\" # The commented out", "Thepeg(AutotoolsPackage): \"\"\"Toolkit for High Energy Physics Event Generation\"\"\" homepage =", "version('1.8.2', sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa') # version('1.8.1', sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562') # version('1.8.0', sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037') # version('1.7.3',", "version('1.8.3', sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7') # version('1.8.2', sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa') # version('1.8.1', sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562') # version('1.8.0',", "depends_on('fastjet', when='@2.0.0:') depends_on('rivet', when='@2.0.3:') depends_on('boost', when='@2.1.1:') depends_on('autoconf', 
type='build') depends_on('automake', type='build')", "# version('1.9.1', sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9') version('1.9.0', sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d') version('1.8.3', sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7') # version('1.8.2', sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa')", "patches # and/or recipe changes version('2.2.1', sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f') version('2.2.0', sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8') #", "sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680') # version('1.5.0', sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2') # version('1.4.2', sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c') # version('1.4.1', sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a')", "sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f') # version('1.3.0', sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045') patch('thepeg-1.8.3.patch', when='@1.8.3', level=0) patch('thepeg-1.9.0.patch', when='@1.9.0', level=0)", "depends_on('rivet', when='@2.0.3:') depends_on('boost', when='@2.1.1:') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build')", "+= ['--with-LHAPDF=' + self.spec['lhapdf'].prefix] else: args += ['--with-lhapdf=' + self.spec['lhapdf'].prefix]", "(Apache-2.0 OR MIT) from spack import * class Thepeg(AutotoolsPackage): \"\"\"Toolkit", "version('2.0.3', sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634') # version('2.0.2', sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b') # version('2.0.1', sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb') # version('2.0.0',", 
"sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045') patch('thepeg-1.8.3.patch', when='@1.8.3', level=0) patch('thepeg-1.9.0.patch', when='@1.9.0', level=0) patch('thepeg-1.9.2.patch', when='@1.9.2', level=0)", "in 2.2.0') depends_on('fastjet', when='@2.0.0:') depends_on('rivet', when='@2.0.3:') depends_on('boost', when='@2.1.1:') depends_on('autoconf', type='build')", "when='@:2.1.999', msg='HepMC3 support was added in 2.2.0') depends_on('fastjet', when='@2.0.0:') depends_on('rivet',", "type='build') variant('hepmc', default='2', values=('2', '3'), description='HepMC interface to build ')", "if self.spec.satisfies('@2.2.0:'): args += ['--with-hepmcversion=' + self.spec.variants['hepmc'].value] if self.spec.satisfies('@2.0.0:'): args", "description='HepMC interface to build ') install_targets = ['install-strip'] def configure_args(self):", "install_targets = ['install-strip'] def configure_args(self): args = ['--with-gsl=' + self.spec['gsl'].prefix,", "sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634') # version('2.0.2', sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b') # version('2.0.1', sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb') # version('2.0.0', sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04')", "recipe changes version('2.2.1', sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f') version('2.2.0', sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8') # version('2.1.7', sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8') version('2.1.6',", "version('1.7.2', sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013') # version('1.7.1', sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e') # version('1.7.0', sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312') # version('1.6.1',", "version('1.7.0', 
sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312') # version('1.6.1', sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f') # version('1.6.0', sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680') # version('1.5.0',", "version('1.6.0', sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680') # version('1.5.0', sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2') # version('1.4.2', sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c') # version('1.4.1',", "self.spec.variants['hepmc'].value] if self.spec.satisfies('@2.0.0:'): args += ['--with-fastjet=' + self.spec['fastjet'].prefix] if self.spec.satisfies('@2.0.3:'):", "def configure_args(self): args = ['--with-gsl=' + self.spec['gsl'].prefix, '--without-javagui'] if self.spec.satisfies('@:1.8.999'):", "sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037') # version('1.7.3', sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187') # version('1.7.2', sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013') # version('1.7.1', sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e')", "version('1.4.1', sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a') # version('1.4.0', sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f') # version('1.3.0', sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045') patch('thepeg-1.8.3.patch', when='@1.8.3',", "The commented out versions exist, but may need patches #", "type='build') depends_on('automake', type='build') depends_on('libtool', type='build') depends_on('m4', type='build') variant('hepmc', default='2', values=('2',", "values=('2', '3'), description='HepMC interface to build ') install_targets = ['install-strip']", "version('2.0.1', sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb') # 
version('2.0.0', sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04') version('1.9.2', sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4') # version('1.9.1', sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9')", "Lawrence Livermore National Security, LLC and other # Spack Project", "depends_on('hepmc', when='hepmc=2') depends_on('hepmc3', when='hepmc=3') conflicts('hepmc=3', when='@:2.1.999', msg='HepMC3 support was added", "'--without-javagui'] if self.spec.satisfies('@:1.8.999'): args += ['--with-LHAPDF=' + self.spec['lhapdf'].prefix] else: args", "+ self.spec.variants['hepmc'].value] if self.spec.satisfies('@2.0.0:'): args += ['--with-fastjet=' + self.spec['fastjet'].prefix] if", "to build ') install_targets = ['install-strip'] def configure_args(self): args =", "['--with-hepmcversion=' + self.spec.variants['hepmc'].value] if self.spec.satisfies('@2.0.0:'): args += ['--with-fastjet=' + self.spec['fastjet'].prefix]", "= ['install-strip'] def configure_args(self): args = ['--with-gsl=' + self.spec['gsl'].prefix, '--without-javagui']", "sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2') # version('1.4.2', sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c') # version('1.4.1', sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a') # version('1.4.0', sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f')", "High Energy Physics Event Generation\"\"\" homepage = \"http://home.thep.lu.se/~leif/ThePEG/\" url =", "# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other", "sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49') version('2.1.1', sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281') # version('2.1.0', sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39') # version('2.0.4', 
sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4') #", "# version('2.0.4', sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4') # version('2.0.3', sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634') # version('2.0.2', sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b') #", "when='hepmc=3') conflicts('hepmc=3', when='@:2.1.999', msg='HepMC3 support was added in 2.2.0') depends_on('fastjet',", "2013-2020 Lawrence Livermore National Security, LLC and other # Spack", "# version('1.7.1', sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e') # version('1.7.0', sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312') # version('1.6.1', sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f') #", "when='@1.8.3', level=0) patch('thepeg-1.9.0.patch', when='@1.9.0', level=0) patch('thepeg-1.9.2.patch', when='@1.9.2', level=0) patch('thepeg-2.1.1.patch', when='@2.1.1:2.2.1',", "when='@1.9.2', level=0) patch('thepeg-2.1.1.patch', when='@2.1.1:2.2.1', level=0) depends_on('gsl') depends_on('lhapdf') depends_on('lhapdf@:6.2.999', when='@:1.9.999') depends_on('hepmc',", "self.spec.satisfies('@:1.8.999'): args += ['--with-LHAPDF=' + self.spec['lhapdf'].prefix] else: args += ['--with-lhapdf='", "patch('thepeg-1.8.3.patch', when='@1.8.3', level=0) patch('thepeg-1.9.0.patch', when='@1.9.0', level=0) patch('thepeg-1.9.2.patch', when='@1.9.2', level=0) patch('thepeg-2.1.1.patch',", "version('1.9.2', sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4') # version('1.9.1', sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9') version('1.9.0', sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d') version('1.8.3', sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7') #", "# version('1.3.0', 
sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045') patch('thepeg-1.8.3.patch', when='@1.8.3', level=0) patch('thepeg-1.9.0.patch', when='@1.9.0', level=0) patch('thepeg-1.9.2.patch',", "args += ['--with-LHAPDF=' + self.spec['lhapdf'].prefix] else: args += ['--with-lhapdf=' +", "# version('2.1.4', sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5') version('2.1.3', sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c') # version('2.1.2', sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49') version('2.1.1', sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281')", "self.spec['rivet'].prefix] if self.spec.satisfies('@:2.1.999'): args += ['--with-boost=' + self.spec['boost'].prefix] args +=", "+= ['--with-rivet=' + self.spec['rivet'].prefix] if self.spec.satisfies('@:2.1.999'): args += ['--with-boost=' +", "versions exist, but may need patches # and/or recipe changes", "# version('2.1.2', sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49') version('2.1.1', sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281') # version('2.1.0', sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39') # version('2.0.4',", "# version('1.4.0', sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f') # version('1.3.0', sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045') patch('thepeg-1.8.3.patch', when='@1.8.3', level=0) patch('thepeg-1.9.0.patch',", "sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c') # version('1.4.1', sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a') # version('1.4.0', sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f') # version('1.3.0', sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045')", "args = ['--with-gsl=' + self.spec['gsl'].prefix, '--without-javagui'] if 
self.spec.satisfies('@:1.8.999'): args +=", "# version('1.7.2', sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013') # version('1.7.1', sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e') # version('1.7.0', sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312') #", "version('2.1.0', sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39') # version('2.0.4', sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4') # version('2.0.3', sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634') # version('2.0.2',", "version('2.1.7', sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8') version('2.1.6', sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f') version('2.1.5', sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6') # version('2.1.4', sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5') version('2.1.3',", "sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4') # version('2.0.3', sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634') # version('2.0.2', sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b') # version('2.0.1', sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb')", "Generation\"\"\" homepage = \"http://home.thep.lu.se/~leif/ThePEG/\" url = \"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\" # The commented", "version('2.0.2', sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b') # version('2.0.1', sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb') # version('2.0.0', sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04') version('1.9.2', sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4')", "type='build') depends_on('libtool', type='build') depends_on('m4', type='build') 
variant('hepmc', default='2', values=('2', '3'), description='HepMC", "Spack Project Developers. See the top-level COPYRIGHT file for details.", "Event Generation\"\"\" homepage = \"http://home.thep.lu.se/~leif/ThePEG/\" url = \"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\" # The", "need patches # and/or recipe changes version('2.2.1', sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f') version('2.2.0', sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8')", "version('2.2.0', sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8') # version('2.1.7', sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8') version('2.1.6', sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f') version('2.1.5', sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6') #", "sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d') version('1.8.3', sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7') # version('1.8.2', sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa') # version('1.8.1', sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562') #", "National Security, LLC and other # Spack Project Developers. 
See", "variant('hepmc', default='2', values=('2', '3'), description='HepMC interface to build ') install_targets", "url = \"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\" # The commented out versions exist, but", "version('1.7.1', sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e') # version('1.7.0', sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312') # version('1.6.1', sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f') # version('1.6.0',", "args += ['--with-hepmc=' + self.spec['hepmc'].prefix] else: args += ['--with-hepmc=' +", "out versions exist, but may need patches # and/or recipe", "sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04') version('1.9.2', sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4') # version('1.9.1', sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9') version('1.9.0', sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d') version('1.8.3', sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7')", "self.spec['lhapdf'].prefix] if self.spec.satisfies('hepmc=2'): args += ['--with-hepmc=' + self.spec['hepmc'].prefix] else: args", "OR MIT) from spack import * class Thepeg(AutotoolsPackage): \"\"\"Toolkit for", "depends_on('automake', type='build') depends_on('libtool', type='build') depends_on('m4', type='build') variant('hepmc', default='2', values=('2', '3'),", "self.spec['fastjet'].prefix] if self.spec.satisfies('@2.0.3:'): args += ['--with-rivet=' + self.spec['rivet'].prefix] if self.spec.satisfies('@:2.1.999'):", "Security, LLC and other # Spack Project Developers. 
See the", "and/or recipe changes version('2.2.1', sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f') version('2.2.0', sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8') # version('2.1.7', sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8')", "level=0) patch('thepeg-2.1.1.patch', when='@2.1.1:2.2.1', level=0) depends_on('gsl') depends_on('lhapdf') depends_on('lhapdf@:6.2.999', when='@:1.9.999') depends_on('hepmc', when='hepmc=2')", "+= ['--with-boost=' + self.spec['boost'].prefix] args += ['CFLAGS=-O2', 'CXXFLAGS=-O2', 'FFLAGS=-O2'] return", "\"\"\"Toolkit for High Energy Physics Event Generation\"\"\" homepage = \"http://home.thep.lu.se/~leif/ThePEG/\"", "+ self.spec['gsl'].prefix, '--without-javagui'] if self.spec.satisfies('@:1.8.999'): args += ['--with-LHAPDF=' + self.spec['lhapdf'].prefix]", "version('1.4.0', sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f') # version('1.3.0', sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045') patch('thepeg-1.8.3.patch', when='@1.8.3', level=0) patch('thepeg-1.9.0.patch', when='@1.9.0',", "if self.spec.satisfies('@2.0.3:'): args += ['--with-rivet=' + self.spec['rivet'].prefix] if self.spec.satisfies('@:2.1.999'): args", "sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7') # version('1.8.2', sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa') # version('1.8.1', sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562') # version('1.8.0', sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037')", "+= ['--with-lhapdf=' + self.spec['lhapdf'].prefix] if self.spec.satisfies('hepmc=2'): args += ['--with-hepmc=' +", "self.spec.satisfies('@:2.1.999'): args += ['--with-boost=' + self.spec['boost'].prefix] args += ['CFLAGS=-O2', 'CXXFLAGS=-O2',", "# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import *", "default='2', 
values=('2', '3'), description='HepMC interface to build ') install_targets =", "+= ['--with-hepmc=' + self.spec['hepmc'].prefix] else: args += ['--with-hepmc=' + self.spec['hepmc3'].prefix]", "depends_on('m4', type='build') variant('hepmc', default='2', values=('2', '3'), description='HepMC interface to build", "Energy Physics Event Generation\"\"\" homepage = \"http://home.thep.lu.se/~leif/ThePEG/\" url = \"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\"", "version('1.9.0', sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d') version('1.8.3', sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7') # version('1.8.2', sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa') # version('1.8.1', sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562')", "') install_targets = ['install-strip'] def configure_args(self): args = ['--with-gsl=' +", "# version('2.1.7', sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8') version('2.1.6', sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f') version('2.1.5', sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6') # version('2.1.4', sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5')", "patch('thepeg-2.1.1.patch', when='@2.1.1:2.2.1', level=0) depends_on('gsl') depends_on('lhapdf') depends_on('lhapdf@:6.2.999', when='@:1.9.999') depends_on('hepmc', when='hepmc=2') depends_on('hepmc3',", "the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0", "depends_on('lhapdf') depends_on('lhapdf@:6.2.999', when='@:1.9.999') depends_on('hepmc', when='hepmc=2') depends_on('hepmc3', when='hepmc=3') conflicts('hepmc=3', when='@:2.1.999', msg='HepMC3", "= ['--with-gsl=' + self.spec['gsl'].prefix, '--without-javagui'] if self.spec.satisfies('@:1.8.999'): args += ['--with-LHAPDF='", "self.spec['gsl'].prefix, '--without-javagui'] if self.spec.satisfies('@:1.8.999'): args += ['--with-LHAPDF=' + self.spec['lhapdf'].prefix] else:", "self.spec.satisfies('@2.0.0:'): args += ['--with-fastjet=' + self.spec['fastjet'].prefix] if self.spec.satisfies('@2.0.3:'): args +=", "file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from", "# Spack Project Developers. See the top-level COPYRIGHT file for", "# version('1.4.2', sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c') # version('1.4.1', sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a') # version('1.4.0', sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f') #", "['install-strip'] def configure_args(self): args = ['--with-gsl=' + self.spec['gsl'].prefix, '--without-javagui'] if", "configure_args(self): args = ['--with-gsl=' + self.spec['gsl'].prefix, '--without-javagui'] if self.spec.satisfies('@:1.8.999'): args", "conflicts('hepmc=3', when='@:2.1.999', msg='HepMC3 support was added in 2.2.0') depends_on('fastjet', when='@2.0.0:')", "version('2.1.1', sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281') # version('2.1.0', sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39') # version('2.0.4', sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4') # version('2.0.3',", "# version('2.0.3', sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634') # version('2.0.2', sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b') # version('2.0.1', 
sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb') #", "Livermore National Security, LLC and other # Spack Project Developers.", "self.spec.satisfies('@2.0.3:'): args += ['--with-rivet=' + self.spec['rivet'].prefix] if self.spec.satisfies('@:2.1.999'): args +=", "# version('2.0.0', sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04') version('1.9.2', sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4') # version('1.9.1', sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9') version('1.9.0', sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d')", "type='build') depends_on('m4', type='build') variant('hepmc', default='2', values=('2', '3'), description='HepMC interface to", "args += ['--with-boost=' + self.spec['boost'].prefix] args += ['CFLAGS=-O2', 'CXXFLAGS=-O2', 'FFLAGS=-O2']", "when='@2.0.3:') depends_on('boost', when='@2.1.1:') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') depends_on('m4',", "# version('2.1.0', sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39') # version('2.0.4', sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4') # version('2.0.3', sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634') #", "args += ['--with-lhapdf=' + self.spec['lhapdf'].prefix] if self.spec.satisfies('hepmc=2'): args += ['--with-hepmc='", "args += ['--with-rivet=' + self.spec['rivet'].prefix] if self.spec.satisfies('@:2.1.999'): args += ['--with-boost='", "sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39') # version('2.0.4', sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4') # version('2.0.3', sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634') # version('2.0.2', sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b')", "self.spec['hepmc'].prefix] 
else: args += ['--with-hepmc=' + self.spec['hepmc3'].prefix] if self.spec.satisfies('@2.2.0:'): args", "version('1.3.0', sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045') patch('thepeg-1.8.3.patch', when='@1.8.3', level=0) patch('thepeg-1.9.0.patch', when='@1.9.0', level=0) patch('thepeg-1.9.2.patch', when='@1.9.2',", "class Thepeg(AutotoolsPackage): \"\"\"Toolkit for High Energy Physics Event Generation\"\"\" homepage", "# version('1.6.1', sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f') # version('1.6.0', sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680') # version('1.5.0', sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2') #", "SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Thepeg(AutotoolsPackage):", "+ self.spec['lhapdf'].prefix] else: args += ['--with-lhapdf=' + self.spec['lhapdf'].prefix] if self.spec.satisfies('hepmc=2'):", "was added in 2.2.0') depends_on('fastjet', when='@2.0.0:') depends_on('rivet', when='@2.0.3:') depends_on('boost', when='@2.1.1:')", "spack import * class Thepeg(AutotoolsPackage): \"\"\"Toolkit for High Energy Physics", "2.2.0') depends_on('fastjet', when='@2.0.0:') depends_on('rivet', when='@2.0.3:') depends_on('boost', when='@2.1.1:') depends_on('autoconf', type='build') depends_on('automake',", "self.spec['hepmc3'].prefix] if self.spec.satisfies('@2.2.0:'): args += ['--with-hepmcversion=' + self.spec.variants['hepmc'].value] if self.spec.satisfies('@2.0.0:'):", "exist, but may need patches # and/or recipe changes version('2.2.1',", "sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8') version('2.1.6', sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f') version('2.1.5', sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6') # version('2.1.4', sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5') version('2.1.3', 
sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c')", "version('1.7.3', sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187') # version('1.7.2', sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013') # version('1.7.1', sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e') # version('1.7.0',", "version('1.5.0', sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2') # version('1.4.2', sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c') # version('1.4.1', sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a') # version('1.4.0',", "import * class Thepeg(AutotoolsPackage): \"\"\"Toolkit for High Energy Physics Event", "Physics Event Generation\"\"\" homepage = \"http://home.thep.lu.se/~leif/ThePEG/\" url = \"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\" #", "depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') depends_on('m4', type='build') variant('hepmc', default='2',", "LLC and other # Spack Project Developers. See the top-level", "details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import", "commented out versions exist, but may need patches # and/or", "\"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\" # The commented out versions exist, but may need", "self.spec.satisfies('@2.2.0:'): args += ['--with-hepmcversion=' + self.spec.variants['hepmc'].value] if self.spec.satisfies('@2.0.0:'): args +=", "if self.spec.satisfies('@2.0.0:'): args += ['--with-fastjet=' + self.spec['fastjet'].prefix] if self.spec.satisfies('@2.0.3:'): args", "= \"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\" # The commented out versions exist, but may", "for High Energy Physics Event Generation\"\"\" homepage = \"http://home.thep.lu.se/~leif/ThePEG/\" url", "version('2.1.3', sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c') # version('2.1.2', sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49') version('2.1.1', sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281') # version('2.1.0', sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39')", "# version('1.8.0', sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037') # version('1.7.3', sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187') # version('1.7.2', sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013') #", "+ self.spec['rivet'].prefix] if self.spec.satisfies('@:2.1.999'): args += ['--with-boost=' + self.spec['boost'].prefix] args", "+= ['--with-hepmc=' + self.spec['hepmc3'].prefix] if self.spec.satisfies('@2.2.0:'): args += ['--with-hepmcversion=' +", "sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b') # version('2.0.1', sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb') # version('2.0.0', sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04') version('1.9.2', 
sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4') #", "version('2.2.1', sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f') version('2.2.0', sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8') # version('2.1.7', sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8') version('2.1.6', sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f') version('2.1.5',", "['--with-gsl=' + self.spec['gsl'].prefix, '--without-javagui'] if self.spec.satisfies('@:1.8.999'): args += ['--with-LHAPDF=' +", "sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f') version('2.2.0', sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8') # version('2.1.7', sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8') version('2.1.6', sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f') version('2.1.5', sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6')", "depends_on('boost', when='@2.1.1:') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') depends_on('m4', type='build')", "may need patches # and/or recipe changes version('2.2.1', sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f') version('2.2.0',", "if self.spec.satisfies('@:1.8.999'): args += ['--with-LHAPDF=' + self.spec['lhapdf'].prefix] else: args +=", "when='@2.1.1:') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') depends_on('m4', type='build') variant('hepmc',", "sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a') # version('1.4.0', sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f') # version('1.3.0', sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045') patch('thepeg-1.8.3.patch', when='@1.8.3', level=0)", "args += 
['--with-hepmc=' + self.spec['hepmc3'].prefix] if self.spec.satisfies('@2.2.0:'): args += ['--with-hepmcversion='", "sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312') # version('1.6.1', sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f') # version('1.6.0', sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680') # version('1.5.0', sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2')", "['--with-LHAPDF=' + self.spec['lhapdf'].prefix] else: args += ['--with-lhapdf=' + self.spec['lhapdf'].prefix] if", "when='@1.9.0', level=0) patch('thepeg-1.9.2.patch', when='@1.9.2', level=0) patch('thepeg-2.1.1.patch', when='@2.1.1:2.2.1', level=0) depends_on('gsl') depends_on('lhapdf')", "['--with-hepmc=' + self.spec['hepmc'].prefix] else: args += ['--with-hepmc=' + self.spec['hepmc3'].prefix] if", "# version('2.0.2', sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b') # version('2.0.1', sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb') # version('2.0.0', sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04') version('1.9.2',", "else: args += ['--with-hepmc=' + self.spec['hepmc3'].prefix] if self.spec.satisfies('@2.2.0:'): args +=", "Developers. See the top-level COPYRIGHT file for details. 
# #", "# The commented out versions exist, but may need patches", "version('2.1.6', sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f') version('2.1.5', sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6') # version('2.1.4', sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5') version('2.1.3', sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c') #", "sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4') # version('1.9.1', sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9') version('1.9.0', sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d') version('1.8.3', sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7') # version('1.8.2',", "from spack import * class Thepeg(AutotoolsPackage): \"\"\"Toolkit for High Energy", "# version('1.8.2', sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa') # version('1.8.1', sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562') # version('1.8.0', sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037') #", "sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013') # version('1.7.1', sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e') # version('1.7.0', sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312') # version('1.6.1', sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f')", "version('1.6.1', sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f') # version('1.6.0', sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680') # version('1.5.0', sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2') # version('1.4.2',", "version('2.0.4', sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4') # version('2.0.3', 
sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634') # version('2.0.2', sha256='d4249e019543d5c7520733292d2edfb0bdd9733177200a63837781ed6194789b') # version('2.0.1',", "version('1.8.1', sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562') # version('1.8.0', sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037') # version('1.7.3', sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187') # version('1.7.2',", "depends_on('lhapdf@:6.2.999', when='@:1.9.999') depends_on('hepmc', when='hepmc=2') depends_on('hepmc3', when='hepmc=3') conflicts('hepmc=3', when='@:2.1.999', msg='HepMC3 support", "+ self.spec['hepmc3'].prefix] if self.spec.satisfies('@2.2.0:'): args += ['--with-hepmcversion=' + self.spec.variants['hepmc'].value] if", "when='@:1.9.999') depends_on('hepmc', when='hepmc=2') depends_on('hepmc3', when='hepmc=3') conflicts('hepmc=3', when='@:2.1.999', msg='HepMC3 support was", "Copyright 2013-2020 Lawrence Livermore National Security, LLC and other #", "sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5') version('2.1.3', sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c') # version('2.1.2', sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49') version('2.1.1', sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281') # version('2.1.0',", "sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6') # version('2.1.4', sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5') version('2.1.3', sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c') # version('2.1.2', sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49') version('2.1.1',", "+= ['--with-fastjet=' + self.spec['fastjet'].prefix] if self.spec.satisfies('@2.0.3:'): args += ['--with-rivet=' +", "msg='HepMC3 support was added in 2.2.0') depends_on('fastjet', when='@2.0.0:') 
depends_on('rivet', when='@2.0.3:')", "version('1.4.2', sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c') # version('1.4.1', sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a') # version('1.4.0', sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f') # version('1.3.0',", "# SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class", "# version('1.7.3', sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187') # version('1.7.2', sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013') # version('1.7.1', sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e') #", "+ self.spec['hepmc'].prefix] else: args += ['--with-hepmc=' + self.spec['hepmc3'].prefix] if self.spec.satisfies('@2.2.0:'):", "level=0) patch('thepeg-1.9.2.patch', when='@1.9.2', level=0) patch('thepeg-2.1.1.patch', when='@2.1.1:2.2.1', level=0) depends_on('gsl') depends_on('lhapdf') depends_on('lhapdf@:6.2.999',", "version('2.1.4', sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5') version('2.1.3', sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c') # version('2.1.2', sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49') version('2.1.1', sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281') #", "depends_on('gsl') depends_on('lhapdf') depends_on('lhapdf@:6.2.999', when='@:1.9.999') depends_on('hepmc', when='hepmc=2') depends_on('hepmc3', when='hepmc=3') conflicts('hepmc=3', when='@:2.1.999',", "self.spec.satisfies('hepmc=2'): args += ['--with-hepmc=' + self.spec['hepmc'].prefix] else: args += ['--with-hepmc='", "but may need patches # and/or recipe changes version('2.2.1', sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f')", "sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8') # version('2.1.7', 
sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8') version('2.1.6', sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f') version('2.1.5', sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6') # version('2.1.4',", "sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9') version('1.9.0', sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d') version('1.8.3', sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7') # version('1.8.2', sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa') # version('1.8.1',", "# version('2.0.1', sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb') # version('2.0.0', sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04') version('1.9.2', sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4') # version('1.9.1',", "sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281') # version('2.1.0', sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39') # version('2.0.4', sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4') # version('2.0.3', sha256='c57ba68fbfda06a0ba256e06f276f91434bf2529a13f6287c051a4cd6da44634')", "sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187') # version('1.7.2', sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013') # version('1.7.1', sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e') # version('1.7.0', sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312')", "['--with-hepmc=' + self.spec['hepmc3'].prefix] if self.spec.satisfies('@2.2.0:'): args += ['--with-hepmcversion=' + self.spec.variants['hepmc'].value]", "level=0) patch('thepeg-1.9.0.patch', when='@1.9.0', level=0) patch('thepeg-1.9.2.patch', when='@1.9.2', level=0) patch('thepeg-2.1.1.patch', when='@2.1.1:2.2.1', 
level=0)", "'3'), description='HepMC interface to build ') install_targets = ['install-strip'] def", "sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f') version('2.1.5', sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6') # version('2.1.4', sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5') version('2.1.3', sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c') # version('2.1.2',", "interface to build ') install_targets = ['install-strip'] def configure_args(self): args", "depends_on('hepmc3', when='hepmc=3') conflicts('hepmc=3', when='@:2.1.999', msg='HepMC3 support was added in 2.2.0')", "version('2.1.5', sha256='c61a00fb6cf406f0f98e8c934683d8d5efcb655747842113abc92e9526e4b5e6') # version('2.1.4', sha256='400c37319aa967ed993fdbec84fc65b24f6cb3779fb1b173d7f5d7a56b772df5') version('2.1.3', sha256='16e8f6507530c2b80ed873ad22946efefed7355d15c7026f3465f18acebc1c0c') # version('2.1.2', sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49')", "= \"http://home.thep.lu.se/~leif/ThePEG/\" url = \"https://thepeg.hepforge.org/downloads/?f=ThePEG-2.2.1.tar.bz2\" # The commented out versions", "MIT) from spack import * class Thepeg(AutotoolsPackage): \"\"\"Toolkit for High", "+ self.spec['lhapdf'].prefix] if self.spec.satisfies('hepmc=2'): args += ['--with-hepmc=' + self.spec['hepmc'].prefix] else:", "['--with-lhapdf=' + self.spec['lhapdf'].prefix] if self.spec.satisfies('hepmc=2'): args += ['--with-hepmc=' + self.spec['hepmc'].prefix]", "* class Thepeg(AutotoolsPackage): \"\"\"Toolkit for High Energy Physics Event Generation\"\"\"", "build ') install_targets = ['install-strip'] def configure_args(self): args = ['--with-gsl='", "# version('1.4.1', sha256='156d06fd1ce68466d1f2adb9cc13f412b8b87073ec6a1d02102b173c34c29b8a') # version('1.4.0', sha256='b1f55e9a3bec713e9abf2fe71c5bd8cf8df936ea00b09f96df9123d0d5ab233f') # version('1.3.0', 
sha256='f731ebf3ce5a52b6d750d6e3c282fdc74d8ffd78bccb47b68f10a4daf44c7045') patch('thepeg-1.8.3.patch',", "See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier:", "# version('1.6.0', sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680') # version('1.5.0', sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2') # version('1.4.2', sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c') #", "and other # Spack Project Developers. See the top-level COPYRIGHT", "version('1.9.1', sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9') version('1.9.0', sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d') version('1.8.3', sha256='55ede3a3dd0bd07b90d0d49cf7ae28c18cd965780fdf53528508b97d57152fc7') # version('1.8.2', sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa') #", "level=0) depends_on('gsl') depends_on('lhapdf') depends_on('lhapdf@:6.2.999', when='@:1.9.999') depends_on('hepmc', when='hepmc=2') depends_on('hepmc3', when='hepmc=3') conflicts('hepmc=3',", "else: args += ['--with-lhapdf=' + self.spec['lhapdf'].prefix] if self.spec.satisfies('hepmc=2'): args +=", "sha256='13434dc7a8623cacb94c0b5c8d7e15b4c5d5187fe9322d1afc1c91b2c940102e') # version('1.7.0', sha256='40eb7196139a8bf4c35f5bb69818135943d534457df64aeb1cf60b6621435312') # version('1.6.1', sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f') # version('1.6.0', sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680')", "# and/or recipe changes version('2.2.1', sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f') version('2.2.0', sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8') # version('2.1.7',", "when='@2.1.1:2.2.1', level=0) depends_on('gsl') depends_on('lhapdf') depends_on('lhapdf@:6.2.999', when='@:1.9.999') depends_on('hepmc', when='hepmc=2') depends_on('hepmc3', when='hepmc=3')", 
"added in 2.2.0') depends_on('fastjet', when='@2.0.0:') depends_on('rivet', when='@2.0.3:') depends_on('boost', when='@2.1.1:') depends_on('autoconf',", "if self.spec.satisfies('hepmc=2'): args += ['--with-hepmc=' + self.spec['hepmc'].prefix] else: args +=", "Project Developers. See the top-level COPYRIGHT file for details. #", "version('2.0.0', sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04') version('1.9.2', sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4') # version('1.9.1', sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9') version('1.9.0', sha256='3ee58e5e3a26184567df1b9a10ca70df228e86f322e72f018dd7d8d5a4700a5d') version('1.8.3',", "# version('1.8.1', sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562') # version('1.8.0', sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037') # version('1.7.3', sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187') #", "['--with-rivet=' + self.spec['rivet'].prefix] if self.spec.satisfies('@:2.1.999'): args += ['--with-boost=' + self.spec['boost'].prefix]", "other # Spack Project Developers. 
See the top-level COPYRIGHT file", "patch('thepeg-1.9.2.patch', when='@1.9.2', level=0) patch('thepeg-2.1.1.patch', when='@2.1.1:2.2.1', level=0) depends_on('gsl') depends_on('lhapdf') depends_on('lhapdf@:6.2.999', when='@:1.9.999')", "sha256='ec284abdc82ceaf10a8736f908e7955f49f872b79aaa62d22aa33bc5c7679bdb') # version('2.0.0', sha256='571730cc956027dc82780dc04ef6e7382ab5ea853fcfebe259e488c6df302a04') version('1.9.2', sha256='ff7bbb256866f994dae04ade1f57c92d2670edaac3df11c9a300419a5343faf4') # version('1.9.1', sha256='8ec6d0669eba51e308be4e33aeb219999418170eae3aad93ec1491c942c2a4e9') version('1.9.0',", "args += ['--with-fastjet=' + self.spec['fastjet'].prefix] if self.spec.satisfies('@2.0.3:'): args += ['--with-rivet='", "version('2.1.2', sha256='6a0f675a27e10863d495de069f25b892e532beb32e9cbfe5a58317d015387f49') version('2.1.1', sha256='e1b0bdc116fbc9a6e598b601f2aa670530cf2e1cd46b4572814a9b0130b10281') # version('2.1.0', sha256='fe6e7740ce3cd4a3ce3d7a0079a16c9214ad18f432e29d034ae763bfc40f3d39') # version('2.0.4', sha256='f3b625b411667e2708995f1d1379b5b8691406853c8c2cca2f4e4e6e062da0e4')", "sha256='5bc074b78f8b663a6a33df9c94dcaa3100269f8da59f9553a565298e55af270f') # version('1.6.0', sha256='c0ac06b70f3e8046fce4e49ba5916c9b49450f528d0e25f8f7f1427c62fec680') # version('1.5.0', sha256='ccbf102cf1d350a21487518d12e7e03e6e50010e5604f0201f256fa46a7a50c2') # version('1.4.2', sha256='40444304e40e07fd417a8ebf8e5c1cf07e895ceac52ef4f7c1eecc911f6f775c')", "['--with-fastjet=' + self.spec['fastjet'].prefix] if self.spec.satisfies('@2.0.3:'): args += ['--with-rivet=' + self.spec['rivet'].prefix]", "COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT)", "<reponame>carlabguillen/spack # Copyright 2013-2020 Lawrence Livermore National Security, LLC and", "sha256='44ccd0d70e42bb6ecd801a51bade6c25b3953c56f33017402d4f52ee6492dffa') # version('1.8.1', sha256='84c2a212a681545cddd541dca191eb65d96f41df86c87480b6f4f7d4f9683562') # version('1.8.0', sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037') # version('1.7.3', sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187')", "version('1.8.0', sha256='4b22fda1078f410b999a23a17f611c9ae3a7f0f4cee4e83dc82c9336b7adf037') # version('1.7.3', sha256='066d5df74118d6e984bb60e1c0bea08a8edcbcf917d83d8bc32ec6fea0726187') # version('1.7.2', sha256='3b885c6c5a39b7399ccd45d1f5a866b7a65c96174a56a7ff4ae423347843d013') # version('1.7.1',", "top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR", "changes version('2.2.1', sha256='63abc7215e6ad45c11cf9dac013738e194cc38556a8368b850b70ab1b57ea58f') version('2.2.0', sha256='d3e1474811b7d9f61a4a98db1e9d60d8ef8f913a50de4cae4dc2cc4f98e6fbf8') # version('2.1.7', sha256='2e15727afc1fbfb158fa42ded31c4b1e5b51c25ed6bb66a38233e1fc594329c8') version('2.1.6', sha256='c1e51f83716bfca815b55100fbab3805ef5f9b9215e4373b22762693f5353f4f')", "patch('thepeg-1.9.0.patch', when='@1.9.0', level=0) patch('thepeg-1.9.2.patch', when='@1.9.2', level=0) patch('thepeg-2.1.1.patch', when='@2.1.1:2.2.1', level=0) depends_on('gsl')" ]
[ "the License is distributed on an “AS IS” BASIS, without", "self.GetPublicKeyFileName()] output = self.RunCmd(cmd) logging.info(output) cmd = [self.GetCertToolPath(), '--genCIScert', '--priv='", "these value are populated by the # appropriate Script self.__systemUrlHostname__", "+ self.GetCertFileName()) self.__privateKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".priv\")", "# if we know the host name, put that into", "then reads the value from the os environment\"\"\" if (self.__skipInstallParams__", "in os.environ: param = os.environ[key] logging.debug('Env. param found : '", "self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME) self.__systemHosttype__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE) self.__vmcaPassword__ = \\ self.GetInstallParams(INSTALL_PARAM_PASSWORD) self.__vmcaCertPath__", "# VISL will wait until these value are populated by", "\"\"\" certool.py : This is the standard library function for", "# # Possible TODO : support IPv6 in certificates output", "don't need this value, # it is a technique on", "'-name', componentName, '-passout', 'pass:' + self.GetPassword()] output = self.RunCmd(cmd) logging.info(output)", "'yes', '1', 'skip']): self.__skipInstallParams__ = True if (not self.__vislInstall__ and", ": \" + self.GetCertFileName()) self.__privateKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName", "as a main program, include usage information. \"\"\" certool.py :", "OS.Environment has the following defined. 
VMWARE_SKIP_VISL = True system.urlhostname vmdir.ldu-guid", "componentName, componentName + \".pfx\") logging.debug(\"pfx file Name : \" +", "+ self.GetPublicKeyFileName()] output = self.RunCmd(cmd) logging.info(output) cmd = [self.GetCertToolPath(), '--genCIScert',", "self.__skipInstallParams__ is False): errString = 'Unable to find install param", "\\ os.path.join(self.GetCertDir(), componentName, componentName + \".priv\") logging.debug(\"Private Key Name :", "self.FindEnvParams() self.GetVislParams() def GetHostName(self): return self.__systemUrlHostname__ def GetHostType(self): return self.__systemHosttype__", "os.path.exists(dir): os.makedirs(dir) logging.debug(\"Created directory\") except OSError as e: raise Exception(\"I/O", "+ self.__vislInstall__) def GetInstallParams(self, key): \"\"\" Waits on Install Parameter", "'posix'): return '/opt/vmware/bin/certool' def GetOpenSSLPath(self): if(os.name == \"nt\"): PROGRAM_FILES =", "vmdir.ldu-guid system.hostname.type vmca.cert.password vmca.cert.dir \"\"\" __copyright__ = \"Copyright 2012, VMware", "from VMWare Certificate Authority # More details. 
If this module", "= os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/Vmcad/certool.exe') elif (os.name == 'posix'):", "vmca.cert.password vmca.cert.dir \"\"\" __copyright__ = \"Copyright 2012, VMware Inc.\" __version__", "e.strerror)) # Generate Private Key and Public Keys First cmd", "\"Copyright 2012, VMware Inc.\" __version__ = 0.1 __author__ = \"VMware,", "GetPublicKeyFileName(self): return self.__publicKeyFile__ def GetPfxFileName(self): return self.__pfxFileName__ def GenCert(self, componentName):", "Or if the VMWARE_SKIP_VISL = True, then reads the value", "specific language governing permissions and limitations # under the License.", "\".crt\") logging.debug(\"cert File Name : \" + self.GetCertFileName()) self.__privateKeyFile__ =", "self.GetVislParams() def GetHostName(self): return self.__systemUrlHostname__ def GetHostType(self): return self.__systemHosttype__ def", "into the certificate if (self.GetHostType() == 'fqdn'): cmd.append('--FQDN=' + self.GetHostName())", "to return the value from visl. 
Or if the VMWARE_SKIP_VISL", "“License”); you may not # use this file except in", "\"<PASSWORD>\" INSTALL_PARAM_CERT_DIR = \"vmca.cert.dir\" # Please note that each of", "= \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE) self.__vmcaPassword__ = \\ self.GetInstallParams(INSTALL_PARAM_PASSWORD) self.__vmcaCertPath__ = \\", "param script : ' + self.__vislInstall__) def GetInstallParams(self, key): \"\"\"", "we know the host name, put that into the certificate", "not running inside the cloudVM, set VMWARE_SKIP_VISL = True in", "\"\" __privateKeyFileName__ = \"\" __publicKeyFileName__ = \"\" __pfxFileName__ = \"\"", "until these value are populated by the # appropriate Script", "= \"\" def __init__(self): self.FindEnvParams() self.GetVislParams() def GetHostName(self): return self.__systemUrlHostname__", "This will enable this script to look for values in", "instead of VISL namespace.\"\"\" # Find VISL Install Parameter INSTALL_PARAM_ENV_VAR", "library function for cloudVM/vcenterwindows first boot to integrate with VMCA", "+ self.GetPrivateKeyFileName(), '--pub=' + self.GetPublicKeyFileName()] output = self.RunCmd(cmd) logging.info(output) cmd", "may not # use this file except in compliance with", "'--priv=' + self.GetPrivateKeyFileName(), '--pub=' + self.GetPublicKeyFileName()] output = self.RunCmd(cmd) logging.info(output)", "self.__systemUrlHostname__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME) self.__systemHosttype__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE) self.__vmcaPassword__ =", "execute last cmd') else: return p.communicate()[0].rstrip() def GetVislParams(self): \"\"\" Waits", "'skip']): self.__skipInstallParams__ = True if (not self.__vislInstall__ and self.__skipInstallParams__ is", "warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED.", "note that each of this is a blocking call. 
#", "\"\"\" Waits on Install Parameter to return the value from", "cmd') else: return p.communicate()[0].rstrip() def GetVislParams(self): \"\"\" Waits for all", "\\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE) self.__vmcaPassword__ = \\ self.GetInstallParams(INSTALL_PARAM_PASSWORD) self.__vmcaCertPath__ = \\ self.GetInstallParams(INSTALL_PARAM_CERT_DIR)", "\" + dir) try: if not os.path.exists(dir): os.makedirs(dir) logging.debug(\"Created directory\")", "Dir : \" + dir) try: if not os.path.exists(dir): os.makedirs(dir)", "'pass:' + self.GetPassword()] output = self.RunCmd(cmd) logging.info(output) def FindEnvParams(self): \"\"\"", "Authority # More details. If this module can be used", "self.__certfileName__ def GetPrivateKeyFileName(self): return self.__privateKeyFile__ def GetPublicKeyFileName(self): return self.__publicKeyFile__ def", "with certool PKCS12 capabilities cmd = [self.GetOpenSSLPath(), 'pkcs12', '-export', '-in',", "the env. block instead of VISL namespace.\"\"\" # Find VISL", "environment\"\"\" if (self.__skipInstallParams__ is False): cmd = [self.__vislInstall__, '-d', key]", "EXPRESS OR IMPLIED. 
See the # License for the specific", "= \\ self.GetInstallParams(INSTALL_PARAM_PASSWORD) self.__vmcaCertPath__ = \\ self.GetInstallParams(INSTALL_PARAM_CERT_DIR) # We really", "+ \".priv\") logging.debug(\"Private Key Name : \" + self.GetPrivateKeyFileName()) self.__publicKeyFile__", "% VmcaCertool.GetPfxFileName() print 'Using Password : %s' % VmcaCertool.GetPassword() if", "self.__systemUrlHostname__ def GetHostType(self): return self.__systemHosttype__ def GetPassword(self): return self.__vmcaPassword__ def", "= os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/OpenSSL/openssl.exe') elif (os.name == 'posix'):", "'-d', key] output = self.RunCmd(cmd) logging.debug('Install param found :' +", "\\ os.path.join(self.GetCertDir(), componentName, componentName + \".pfx\") logging.debug(\"pfx file Name :", "return self.__systemUrlHostname__ def GetHostType(self): return self.__systemHosttype__ def GetPassword(self): return self.__vmcaPassword__", "# elif (self.GetHostType() == 'ipv6'): # # Possible TODO :", "GetHostName(self): return self.__systemUrlHostname__ def GetHostType(self): return self.__systemHosttype__ def GetPassword(self): return", "the Cert directory\"\"\" # Generate full file names for all", "First cmd = [self.GetCertToolPath(), '--genkey', '--priv=' + self.GetPrivateKeyFileName(), '--pub=' +", "%s' % args) p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if p.returncode:", "GetOpenSSLPath(self): if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES +", "Exception('Failed to execute last cmd') else: return p.communicate()[0].rstrip() def GetVislParams(self):", "Exception(\"I/O error({0}): {1}\".format(e.errno, e.strerror)) # Generate Private Key and Public", "GetHostType(self): return self.__systemHosttype__ def GetPassword(self): return self.__vmcaPassword__ def GetCertDir(self): return", "# Possible TODO : support IPv4 in certificates # elif", "= 
\"\" __skipInstallParams__ = False __certfileName__ = \"\" __privateKeyFileName__ =", "VMware, Inc. All Rights Reserved. # # Licensed under the", "componentName, componentName + \".crt\") logging.debug(\"cert File Name : \" +", "[self.GetOpenSSLPath(), 'pkcs12', '-export', '-in', self.GetCertFileName(), '-inkey', self.GetPrivateKeyFileName(), '-out', self.GetPfxFileName(), '-name',", "and limitations # under the License. # # Helper function", "__vislInstall__ = \"\" __systemUrlHostname__ = \"\" __systemHosttype__ = \"\" __vmcaPassword__", "'Unable to find install param script' logging.error(errString) raise Exception(errString) logging.debug('Using", "def GetInstallParams(self, key): \"\"\" Waits on Install Parameter to return", "VMCA First Boot if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return", "= self.RunCmd(cmd) logging.info(output) # TODO : Replace this with certool", "if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/Vmcad/certool.exe')", "Reserved. # # Licensed under the Apache License, Version 2.0", "License. You may obtain a copy # of the License", "in the env. block instead of VISL namespace.\"\"\" # Find", "p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if p.returncode: raise Exception('Failed to", "if you are not running inside the cloudVM, set VMWARE_SKIP_VISL", "OSError as e: raise Exception(\"I/O error({0}): {1}\".format(e.errno, e.strerror)) # Generate", "function that gets certificates from VMWare Certificate Authority # More", "certificates # elif (self.GetHostType() == 'ipv6'): # # Possible TODO", "the Certificates in the Cert directory\"\"\" # Generate full file", "this file except in compliance with the License. 
You may", "os.path.join(self.GetCertDir(),componentName) logging.debug(\"Target Dir : \" + dir) try: if not", "elif (self.GetHostType() == 'ipv4'): # # Possible TODO : support", ": ' + self.__vislInstall__) def GetInstallParams(self, key): \"\"\" Waits on", "\"\"\" Example Code Usage \"\"\" testComponent = 'sso' VmcaCertool =", "it is a technique on waiting for directory # first", "Generate full file names for all artifacts self.__certfileName__ = \\", "Parameter to return the value from visl. Or if the", "os.path.normpath(PROGRAM_FILES + '/VMware/CIS/Vmcad/certool.exe') elif (os.name == 'posix'): return '/opt/vmware/bin/certool' def", "def GetPassword(self): return self.__vmcaPassword__ def GetCertDir(self): return self.__vmcaCertPath__ def GetCertFileName(self):", "os environment\"\"\" if (self.__skipInstallParams__ is False): cmd = [self.__vislInstall__, '-d',", "VISL will wait until these value are populated by the", "os.makedirs(dir) logging.debug(\"Created directory\") except OSError as e: raise Exception(\"I/O error({0}):", "obtain a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 #", "include usage information. \"\"\" certool.py : This is the standard", "certool.py : This is the standard library function for cloudVM/vcenterwindows", ": This is the standard library function for cloudVM/vcenterwindows first", "the OS.Environment has the following defined. 
VMWARE_SKIP_VISL = True system.urlhostname", "componentName + \".priv\") logging.debug(\"Private Key Name : \" + self.GetPrivateKeyFileName())", "Find VISL Install Parameter INSTALL_PARAM_ENV_VAR = 'VMWARE_INSTALL_PARAMETER' VMWARE_SKIP_VISL = 'VMWARE_SKIP_VISL'", "os.environ[VMWARE_SKIP_VISL] if (skip in ['true', 'True', 'yes', '1', 'skip']): self.__skipInstallParams__", "software # distributed under the License is distributed on an", "= [self.__vislInstall__, '-d', key] output = self.RunCmd(cmd) logging.debug('Install param found", "componentName + \".crt\") logging.debug(\"cert File Name : \" + self.GetCertFileName())", "may obtain a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0", "logging.debug('Using install param script : ' + self.__vislInstall__) def GetInstallParams(self,", "full file names for all artifacts self.__certfileName__ = \\ os.path.join(self.GetCertDir(),", "\"vmdir.ldu-guid\" INSTALL_PARAM_SYSTEM_HOST_TYPE = \"system.hostname.type\" INSTALL_PARAM_PASSWORD = \"<PASSWORD>\" INSTALL_PARAM_CERT_DIR = \"vmca.cert.dir\"", "'Using Password : %s' % VmcaCertool.GetPassword() if __name__ == \"__main__\":", "TODO : Replace this with certool PKCS12 capabilities cmd =", "\"\" __skipInstallParams__ = False __certfileName__ = \"\" __privateKeyFileName__ = \"\"", "Key Name : \" + self.GetPrivateKeyFileName()) self.__publicKeyFile__ = \\ os.path.join(self.GetCertDir(),", "\"\" __systemUrlHostname__ = \"\" __systemHosttype__ = \"\" __vmcaPassword__ = \"\"", "os.path.join(self.GetCertDir(), componentName, componentName + \".priv\") logging.debug(\"Private Key Name : \"", "componentName + \".pub\") logging.debug(\"Public Key Name : \" + self.GetPublicKeyFileName())", "support IPv6 in certificates output = self.RunCmd(cmd) logging.info(output) # TODO", "'Generated a pfx file : %s' % VmcaCertool.GetPfxFileName() print 'Using", "logging.info(output) def FindEnvParams(self): \"\"\" Finds the Default Environment parameters. if", "defined. 
VMWARE_SKIP_VISL = True system.urlhostname vmdir.ldu-guid system.hostname.type vmca.cert.password vmca.cert.dir \"\"\"", "= 'VMWARE_SKIP_VISL' if INSTALL_PARAM_ENV_VAR in os.environ: self.__vislInstall__ = os.environ[INSTALL_PARAM_ENV_VAR] if", ": support IPv4 in certificates # elif (self.GetHostType() == 'ipv6'):", "under the Apache License, Version 2.0 (the “License”); you may", "logging import os import subprocess class CerTool: __vislInstall__ = \"\"", "file except in compliance with the License. You may obtain", "dir = os.path.join(self.GetCertDir(),componentName) logging.debug(\"Target Dir : \" + dir) try:", "'--pub=' + self.GetPublicKeyFileName()] output = self.RunCmd(cmd) logging.info(output) cmd = [self.GetCertToolPath(),", "Keys First cmd = [self.GetCertToolPath(), '--genkey', '--priv=' + self.GetPrivateKeyFileName(), '--pub='", "FindEnvParams(self): \"\"\" Finds the Default Environment parameters. if you are", "the certificate if (self.GetHostType() == 'fqdn'): cmd.append('--FQDN=' + self.GetHostName()) #", "This is the standard library function for cloudVM/vcenterwindows first boot", "all artifacts self.__certfileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".crt\")", "Publish Certool Path from VMCA First Boot if(os.name == \"nt\"):", "__privateKeyFileName__ = \"\" __publicKeyFileName__ = \"\" __pfxFileName__ = \"\" def", "logging.info('running %s' % args) p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if", "the specific language governing permissions and limitations # under the", "file Name : \" + self.GetPfxFileName()) dir = os.path.join(self.GetCertDir(),componentName) logging.debug(\"Target", "False): cmd = [self.__vislInstall__, '-d', key] output = self.RunCmd(cmd) logging.debug('Install", "'/usr/lib/vmware-openSSL/openssl' def main(): \"\"\" Example Code Usage \"\"\" testComponent =", "pfx file : %s' % VmcaCertool.GetPfxFileName() print 'Using Password :", "Name : \" + 
self.GetPfxFileName()) dir = os.path.join(self.GetCertDir(),componentName) logging.debug(\"Target Dir", "your environment. This will enable this script to look for", "parameters that VMCA certool needs\"\"\" INSTALL_PARAM_SYSTEM_URL_HOSTNAME = \"system.urlhostname\" INSTALL_PARAM_LDU_GUID =", "raise Exception(\"I/O error({0}): {1}\".format(e.errno, e.strerror)) # Generate Private Key and", "' + param) return param else: raise Exception('Requested Value not", "# Generate Private Key and Public Keys First cmd =", "self.GetHostName()) # elif (self.GetHostType() == 'ipv4'): # # Possible TODO", "the License. # # Helper function that gets certificates from", ": Publish Certool Path from VMCA First Boot if(os.name ==", "\"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/Vmcad/certool.exe') elif (os.name", "boot to finish. discardldu = self.GetInstallParams(INSTALL_PARAM_LDU_GUID) def GetCertToolPath(self): \"\"\"returns the", "'-export', '-in', self.GetCertFileName(), '-inkey', self.GetPrivateKeyFileName(), '-out', self.GetPfxFileName(), '-name', componentName, '-passout',", "class CerTool: __vislInstall__ = \"\" __systemUrlHostname__ = \"\" __systemHosttype__ =", "#!/usr/bin/env python # # Copyright © 2012-2016 VMware, Inc. All", "running inside the cloudVM, set VMWARE_SKIP_VISL = True in your", "with the License. You may obtain a copy # of", "return self.__vmcaPassword__ def GetCertDir(self): return self.__vmcaCertPath__ def GetCertFileName(self): return self.__certfileName__", "# warranties or conditions of any kind, EITHER EXPRESS OR", "return p.communicate()[0].rstrip() def GetVislParams(self): \"\"\" Waits for all VISL parameters", "\"\"\" Waits for all VISL parameters that VMCA certool needs\"\"\"", "# under the License. 
# # Helper function that gets", ": \" + dir) try: if not os.path.exists(dir): os.makedirs(dir) logging.debug(\"Created", "= [self.GetOpenSSLPath(), 'pkcs12', '-export', '-in', self.GetCertFileName(), '-inkey', self.GetPrivateKeyFileName(), '-out', self.GetPfxFileName(),", "output) return output else: if val in os.environ: param =", "command\"\"\" logging.info('running %s' % args) p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)", "without # warranties or conditions of any kind, EITHER EXPRESS", "['true', 'True', 'yes', '1', 'skip']): self.__skipInstallParams__ = True if (not", "= 'sso' VmcaCertool = CerTool() VmcaCertool.GenCert(testComponent) print 'Generated a pfx", "'-passout', 'pass:' + self.GetPassword()] output = self.RunCmd(cmd) logging.info(output) def FindEnvParams(self):", "\"\"\" Runs a given command\"\"\" logging.info('running %s' % args) p", "+ key) def RunCmd(self, args): \"\"\" Runs a given command\"\"\"", "to in writing, software # distributed under the License is", "= \"\" __systemHosttype__ = \"\" __vmcaPassword__ = \"\" __vmcaCertPath__ =", "True in your environment. This will enable this script to", "errString = 'Unable to find install param script' logging.error(errString) raise", "if not os.path.exists(dir): os.makedirs(dir) logging.debug(\"Created directory\") except OSError as e:", "= os.environ[key] logging.debug('Env. 
param found : ' + param) return", "os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/OpenSSL/openssl.exe') elif (os.name == 'posix'): return", "on an “AS IS” BASIS, without # warranties or conditions", "__vmcaCertPath__ = \"\" __skipInstallParams__ = False __certfileName__ = \"\" __privateKeyFileName__", "elif (self.GetHostType() == 'ipv6'): # # Possible TODO : support", "Version 2.0 (the “License”); you may not # use this", "inside the cloudVM, set VMWARE_SKIP_VISL = True in your environment.", "# Licensed under the Apache License, Version 2.0 (the “License”);", "Exception('Requested Value not found in Env : ' + key)", "or agreed to in writing, software # distributed under the", "return param else: raise Exception('Requested Value not found in Env", "required by applicable law or agreed to in writing, software", "= \"\" __vmcaPassword__ = \"\" __vmcaCertPath__ = \"\" __skipInstallParams__ =", "given command\"\"\" logging.info('running %s' % args) p = subprocess.Popen(args, stdout=subprocess.PIPE,", "self.__vmcaCertPath__ def GetCertFileName(self): return self.__certfileName__ def GetPrivateKeyFileName(self): return self.__privateKeyFile__ def", "install param script' logging.error(errString) raise Exception(errString) logging.debug('Using install param script", "self.__privateKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".priv\") logging.debug(\"Private Key", "GetCertToolPath(self): \"\"\"returns the path to certool\"\"\" #TODO : Publish Certool", "self.__privateKeyFile__ def GetPublicKeyFileName(self): return self.__publicKeyFile__ def GetPfxFileName(self): return self.__pfxFileName__ def", "return self.__privateKeyFile__ def GetPublicKeyFileName(self): return self.__publicKeyFile__ def GetPfxFileName(self): return self.__pfxFileName__", "# it is a technique on waiting for directory #", "= 'Unable to find install param script' logging.error(errString) raise Exception(errString)", 
"INSTALL_PARAM_CERT_DIR = \"vmca.cert.dir\" # Please note that each of this", "True, then reads the value from the os environment\"\"\" if", "find install param script' logging.error(errString) raise Exception(errString) logging.debug('Using install param", "raise Exception(errString) logging.debug('Using install param script : ' + self.__vislInstall__)", "# Copyright © 2012-2016 VMware, Inc. All Rights Reserved. #", "you may not # use this file except in compliance", "def GetPublicKeyFileName(self): return self.__publicKeyFile__ def GetPfxFileName(self): return self.__pfxFileName__ def GenCert(self,", "the following defined. VMWARE_SKIP_VISL = True system.urlhostname vmdir.ldu-guid system.hostname.type vmca.cert.password", "in os.environ: self.__vislInstall__ = os.environ[INSTALL_PARAM_ENV_VAR] if VMWARE_SKIP_VISL in os.environ: skip", "All Rights Reserved. # # Licensed under the Apache License,", "agreed to in writing, software # distributed under the License", "host name, put that into the certificate if (self.GetHostType() ==", "be used as a main program, include usage information. \"\"\"", "GetPrivateKeyFileName(self): return self.__privateKeyFile__ def GetPublicKeyFileName(self): return self.__publicKeyFile__ def GetPfxFileName(self): return", "except OSError as e: raise Exception(\"I/O error({0}): {1}\".format(e.errno, e.strerror)) #", "Waits on Install Parameter to return the value from visl.", "True if (not self.__vislInstall__ and self.__skipInstallParams__ is False): errString =", "BASIS, without # warranties or conditions of any kind, EITHER", "= CerTool() VmcaCertool.GenCert(testComponent) print 'Generated a pfx file : %s'", "Inc. All Rights Reserved. # # Licensed under the Apache", "GenCert(self, componentName): \"\"\" Generates the Certificates in the Cert directory\"\"\"", "INSTALL_PARAM_PASSWORD = \"<PASSWORD>\" INSTALL_PARAM_CERT_DIR = \"vmca.cert.dir\" # Please note that", "boot to integrate with VMCA Certificate Generation. 
if not running", "'/VMware/CIS/OpenSSL/openssl.exe') elif (os.name == 'posix'): return '/usr/lib/vmware-openSSL/openssl' def main(): \"\"\"", "reads the value from the os environment\"\"\" if (self.__skipInstallParams__ is", "self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE) self.__vmcaPassword__ = \\ self.GetInstallParams(INSTALL_PARAM_PASSWORD) self.__vmcaCertPath__ = \\ self.GetInstallParams(INSTALL_PARAM_CERT_DIR) #", "+ \".pfx\") logging.debug(\"pfx file Name : \" + self.GetPfxFileName()) dir", "logging.debug('Env. param found : ' + param) return param else:", "VMware Inc.\" __version__ = 0.1 __author__ = \"VMware, Inc.\" import", "return self.__pfxFileName__ def GenCert(self, componentName): \"\"\" Generates the Certificates in", "= 'VMWARE_INSTALL_PARAMETER' VMWARE_SKIP_VISL = 'VMWARE_SKIP_VISL' if INSTALL_PARAM_ENV_VAR in os.environ: self.__vislInstall__", "Private Key and Public Keys First cmd = [self.GetCertToolPath(), '--genkey',", "If this module can be used as a main program,", "finish. discardldu = self.GetInstallParams(INSTALL_PARAM_LDU_GUID) def GetCertToolPath(self): \"\"\"returns the path to", "+ self.GetHostName()) # elif (self.GetHostType() == 'ipv4'): # # Possible", "\" + self.GetPublicKeyFileName()) self.__pfxFileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName +", "the # appropriate Script self.__systemUrlHostname__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME) self.__systemHosttype__ =", "VMWARE_SKIP_VISL = True, then reads the value from the os", "waiting for directory # first boot to finish. 
discardldu =", "if p.returncode: raise Exception('Failed to execute last cmd') else: return", "os.path.join(self.GetCertDir(), componentName, componentName + \".pfx\") logging.debug(\"pfx file Name : \"", "logging.debug(\"Target Dir : \" + dir) try: if not os.path.exists(dir):", "args): \"\"\" Runs a given command\"\"\" logging.info('running %s' % args)", "last cmd') else: return p.communicate()[0].rstrip() def GetVislParams(self): \"\"\" Waits for", "self.__systemHosttype__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE) self.__vmcaPassword__ = \\ self.GetInstallParams(INSTALL_PARAM_PASSWORD) self.__vmcaCertPath__ =", "output = self.RunCmd(cmd) logging.info(output) def FindEnvParams(self): \"\"\" Finds the Default", "writing, software # distributed under the License is distributed on", "def __init__(self): self.FindEnvParams() self.GetVislParams() def GetHostName(self): return self.__systemUrlHostname__ def GetHostType(self):", "def GetCertFileName(self): return self.__certfileName__ def GetPrivateKeyFileName(self): return self.__privateKeyFile__ def GetPublicKeyFileName(self):", "needs\"\"\" INSTALL_PARAM_SYSTEM_URL_HOSTNAME = \"system.urlhostname\" INSTALL_PARAM_LDU_GUID = \"vmdir.ldu-guid\" INSTALL_PARAM_SYSTEM_HOST_TYPE = \"system.hostname.type\"", "+ \".pub\") logging.debug(\"Public Key Name : \" + self.GetPublicKeyFileName()) self.__pfxFileName__", "this with certool PKCS12 capabilities cmd = [self.GetOpenSSLPath(), 'pkcs12', '-export',", "a technique on waiting for directory # first boot to", "artifacts self.__certfileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".crt\") logging.debug(\"cert", "any kind, EITHER EXPRESS OR IMPLIED. See the # License", "Name : \" + self.GetPublicKeyFileName()) self.__pfxFileName__ = \\ os.path.join(self.GetCertDir(), componentName,", "\"\"\"returns the path to certool\"\"\" #TODO : Publish Certool Path", "the cloudVM, set VMWARE_SKIP_VISL = True in your environment. 
This", "0.1 __author__ = \"VMware, Inc.\" import logging import os import", "permissions and limitations # under the License. # # Helper", "to look for values in the env. block instead of", "is distributed on an “AS IS” BASIS, without # warranties", "self.__vislInstall__) def GetInstallParams(self, key): \"\"\" Waits on Install Parameter to", "cmd = [self.GetOpenSSLPath(), 'pkcs12', '-export', '-in', self.GetCertFileName(), '-inkey', self.GetPrivateKeyFileName(), '-out',", "not found in Env : ' + key) def RunCmd(self,", "for all VISL parameters that VMCA certool needs\"\"\" INSTALL_PARAM_SYSTEM_URL_HOSTNAME =", "use this file except in compliance with the License. You", "Licensed under the Apache License, Version 2.0 (the “License”); you", "a blocking call. # VISL will wait until these value", "in certificates output = self.RunCmd(cmd) logging.info(output) # TODO : Replace", "following defined. VMWARE_SKIP_VISL = True system.urlhostname vmdir.ldu-guid system.hostname.type vmca.cert.password vmca.cert.dir", "self.RunCmd(cmd) logging.info(output) def FindEnvParams(self): \"\"\" Finds the Default Environment parameters.", "skip = os.environ[VMWARE_SKIP_VISL] if (skip in ['true', 'True', 'yes', '1',", "this value, # it is a technique on waiting for", "def GetOpenSSLPath(self): if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES", "Name : \" + self.GetCertFileName()) self.__privateKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName,", "VISL namespace.\"\"\" # Find VISL Install Parameter INSTALL_PARAM_ENV_VAR = 'VMWARE_INSTALL_PARAMETER'", "elif (os.name == 'posix'): return '/opt/vmware/bin/certool' def GetOpenSSLPath(self): if(os.name ==", "(self.__skipInstallParams__ is False): cmd = [self.__vislInstall__, '-d', key] output =", "python # # Copyright © 2012-2016 VMware, Inc. 
All Rights", "of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "\"VMware, Inc.\" import logging import os import subprocess class CerTool:", "self.GetCertFileName()) self.__privateKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".priv\") logging.debug(\"Private", "this script to look for values in the env. block", "__certfileName__ = \"\" __privateKeyFileName__ = \"\" __publicKeyFileName__ = \"\" __pfxFileName__", "def GetCertToolPath(self): \"\"\"returns the path to certool\"\"\" #TODO : Publish", "componentName, componentName + \".priv\") logging.debug(\"Private Key Name : \" +", "== \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/Vmcad/certool.exe') elif", "information. \"\"\" certool.py : This is the standard library function", "certificate if (self.GetHostType() == 'fqdn'): cmd.append('--FQDN=' + self.GetHostName()) # elif", "key] output = self.RunCmd(cmd) logging.debug('Install param found :' + output)", "kind, EITHER EXPRESS OR IMPLIED. See the # License for", "from visl. Or if the VMWARE_SKIP_VISL = True, then reads", "not # use this file except in compliance with the", "== 'posix'): return '/usr/lib/vmware-openSSL/openssl' def main(): \"\"\" Example Code Usage", "name, put that into the certificate if (self.GetHostType() == 'fqdn'):", "\"\"\" Finds the Default Environment parameters. if you are not", "os.path.join(self.GetCertDir(), componentName, componentName + \".pub\") logging.debug(\"Public Key Name : \"", "running under a cloudVM, then it is assumed that the", "self.GetInstallParams(INSTALL_PARAM_PASSWORD) self.__vmcaCertPath__ = \\ self.GetInstallParams(INSTALL_PARAM_CERT_DIR) # We really don't need", "Generation. 
if not running under a cloudVM, then it is", "(self.GetHostType() == 'fqdn'): cmd.append('--FQDN=' + self.GetHostName()) # elif (self.GetHostType() ==", "else: if val in os.environ: param = os.environ[key] logging.debug('Env. param", "return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/OpenSSL/openssl.exe') elif (os.name == 'posix'): return '/usr/lib/vmware-openSSL/openssl'", "Certificate Generation. if not running under a cloudVM, then it", "under a cloudVM, then it is assumed that the OS.Environment", "else: return p.communicate()[0].rstrip() def GetVislParams(self): \"\"\" Waits for all VISL", "\"\" __pfxFileName__ = \"\" def __init__(self): self.FindEnvParams() self.GetVislParams() def GetHostName(self):", "self.__vislInstall__ = os.environ[INSTALL_PARAM_ENV_VAR] if VMWARE_SKIP_VISL in os.environ: skip = os.environ[VMWARE_SKIP_VISL]", "is a blocking call. # VISL will wait until these", "subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if p.returncode: raise Exception('Failed to execute last", "key): \"\"\" Waits on Install Parameter to return the value", "Inc.\" __version__ = 0.1 __author__ = \"VMware, Inc.\" import logging", "= \"vmdir.ldu-guid\" INSTALL_PARAM_SYSTEM_HOST_TYPE = \"system.hostname.type\" INSTALL_PARAM_PASSWORD = \"<PASSWORD>\" INSTALL_PARAM_CERT_DIR =", "return self.__vmcaCertPath__ def GetCertFileName(self): return self.__certfileName__ def GetPrivateKeyFileName(self): return self.__privateKeyFile__", "\".pub\") logging.debug(\"Public Key Name : \" + self.GetPublicKeyFileName()) self.__pfxFileName__ =", "enable this script to look for values in the env.", "self.__vmcaPassword__ = \\ self.GetInstallParams(INSTALL_PARAM_PASSWORD) self.__vmcaCertPath__ = \\ self.GetInstallParams(INSTALL_PARAM_CERT_DIR) # We", "VMCA Certificate Generation. 
if not running under a cloudVM, then", "+ output) return output else: if val in os.environ: param", "License, Version 2.0 (the “License”); you may not # use", "self.GetPrivateKeyFileName(), '--pub=' + self.GetPublicKeyFileName()] output = self.RunCmd(cmd) logging.info(output) cmd =", "if we know the host name, put that into the", "Install Parameter to return the value from visl. Or if", "# distributed under the License is distributed on an “AS", "testComponent = 'sso' VmcaCertool = CerTool() VmcaCertool.GenCert(testComponent) print 'Generated a", "self.__vmcaCertPath__ = \\ self.GetInstallParams(INSTALL_PARAM_CERT_DIR) # We really don't need this", "2012-2016 VMware, Inc. All Rights Reserved. # # Licensed under", "= 0.1 __author__ = \"VMware, Inc.\" import logging import os", "values in the env. block instead of VISL namespace.\"\"\" #", "See the # License for the specific language governing permissions", "if not running under a cloudVM, then it is assumed", "a main program, include usage information. 
\"\"\" certool.py : This", "\\ self.GetInstallParams(INSTALL_PARAM_CERT_DIR) # We really don't need this value, #", "in os.environ: skip = os.environ[VMWARE_SKIP_VISL] if (skip in ['true', 'True',", "directory\") except OSError as e: raise Exception(\"I/O error({0}): {1}\".format(e.errno, e.strerror))", "self.__publicKeyFile__ def GetPfxFileName(self): return self.__pfxFileName__ def GenCert(self, componentName): \"\"\" Generates", "__author__ = \"VMware, Inc.\" import logging import os import subprocess", "from the os environment\"\"\" if (self.__skipInstallParams__ is False): cmd =", "Please note that each of this is a blocking call.", "system.hostname.type vmca.cert.password vmca.cert.dir \"\"\" __copyright__ = \"Copyright 2012, VMware Inc.\"", "logging.debug(\"cert File Name : \" + self.GetCertFileName()) self.__privateKeyFile__ = \\", "self.RunCmd(cmd) logging.info(output) cmd = [self.GetCertToolPath(), '--genCIScert', '--priv=' + self.GetPrivateKeyFileName(), '--cert='", "GetCertDir(self): return self.__vmcaCertPath__ def GetCertFileName(self): return self.__certfileName__ def GetPrivateKeyFileName(self): return", "look for values in the env. block instead of VISL", "= \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME) self.__systemHosttype__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE) self.__vmcaPassword__ = \\", "__init__(self): self.FindEnvParams() self.GetVislParams() def GetHostName(self): return self.__systemUrlHostname__ def GetHostType(self): return", "= \"\" __publicKeyFileName__ = \"\" __pfxFileName__ = \"\" def __init__(self):", ": \" + self.GetPfxFileName()) dir = os.path.join(self.GetCertDir(),componentName) logging.debug(\"Target Dir :", "INSTALL_PARAM_SYSTEM_HOST_TYPE = \"system.hostname.type\" INSTALL_PARAM_PASSWORD = \"<PASSWORD>\" INSTALL_PARAM_CERT_DIR = \"vmca.cert.dir\" #", "cloudVM/vcenterwindows first boot to integrate with VMCA Certificate Generation. 
if", "componentName, componentName + \".pub\") logging.debug(\"Public Key Name : \" +", "from VMCA First Boot if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES']", "+ self.GetPublicKeyFileName()) self.__pfxFileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".pfx\")", "e: raise Exception(\"I/O error({0}): {1}\".format(e.errno, e.strerror)) # Generate Private Key", "def GetHostName(self): return self.__systemUrlHostname__ def GetHostType(self): return self.__systemHosttype__ def GetPassword(self):", "call. # VISL will wait until these value are populated", "\" + self.GetPfxFileName()) dir = os.path.join(self.GetCertDir(),componentName) logging.debug(\"Target Dir : \"", "'-in', self.GetCertFileName(), '-inkey', self.GetPrivateKeyFileName(), '-out', self.GetPfxFileName(), '-name', componentName, '-passout', 'pass:'", "VMWARE_SKIP_VISL = 'VMWARE_SKIP_VISL' if INSTALL_PARAM_ENV_VAR in os.environ: self.__vislInstall__ = os.environ[INSTALL_PARAM_ENV_VAR]", "File Name : \" + self.GetCertFileName()) self.__privateKeyFile__ = \\ os.path.join(self.GetCertDir(),", "law or agreed to in writing, software # distributed under", "is a technique on waiting for directory # first boot", "for values in the env. block instead of VISL namespace.\"\"\"", ": ' + key) def RunCmd(self, args): \"\"\" Runs a", "module can be used as a main program, include usage", "conditions of any kind, EITHER EXPRESS OR IMPLIED. See the", "= os.environ[INSTALL_PARAM_ENV_VAR] if VMWARE_SKIP_VISL in os.environ: skip = os.environ[VMWARE_SKIP_VISL] if", "each of this is a blocking call. 
# VISL will", "of VISL namespace.\"\"\" # Find VISL Install Parameter INSTALL_PARAM_ENV_VAR =", "raise Exception('Failed to execute last cmd') else: return p.communicate()[0].rstrip() def", "found in Env : ' + key) def RunCmd(self, args):", "to certool\"\"\" #TODO : Publish Certool Path from VMCA First", "if (skip in ['true', 'True', 'yes', '1', 'skip']): self.__skipInstallParams__ =", "cloudVM, set VMWARE_SKIP_VISL = True in your environment. This will", "the value from the os environment\"\"\" if (self.__skipInstallParams__ is False):", "\" + self.GetCertFileName()) self.__privateKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName +", "%s' % VmcaCertool.GetPfxFileName() print 'Using Password : %s' % VmcaCertool.GetPassword()", "def GetCertDir(self): return self.__vmcaCertPath__ def GetCertFileName(self): return self.__certfileName__ def GetPrivateKeyFileName(self):", "is False): cmd = [self.__vislInstall__, '-d', key] output = self.RunCmd(cmd)", "__publicKeyFileName__ = \"\" __pfxFileName__ = \"\" def __init__(self): self.FindEnvParams() self.GetVislParams()", "= \\ os.path.join(self.GetCertDir(), componentName, componentName + \".pub\") logging.debug(\"Public Key Name", "INSTALL_PARAM_ENV_VAR = 'VMWARE_INSTALL_PARAMETER' VMWARE_SKIP_VISL = 'VMWARE_SKIP_VISL' if INSTALL_PARAM_ENV_VAR in os.environ:", "this is a blocking call. # VISL will wait until", "def FindEnvParams(self): \"\"\" Finds the Default Environment parameters. 
if you", "import os import subprocess class CerTool: __vislInstall__ = \"\" __systemUrlHostname__", "{1}\".format(e.errno, e.strerror)) # Generate Private Key and Public Keys First", "__vmcaPassword__ = \"\" __vmcaCertPath__ = \"\" __skipInstallParams__ = False __certfileName__", "Certool Path from VMCA First Boot if(os.name == \"nt\"): PROGRAM_FILES", "subprocess class CerTool: __vislInstall__ = \"\" __systemUrlHostname__ = \"\" __systemHosttype__", "def RunCmd(self, args): \"\"\" Runs a given command\"\"\" logging.info('running %s'", "certool PKCS12 capabilities cmd = [self.GetOpenSSLPath(), 'pkcs12', '-export', '-in', self.GetCertFileName(),", "Certificate Authority # More details. If this module can be", "put that into the certificate if (self.GetHostType() == 'fqdn'): cmd.append('--FQDN='", "a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 # #", "(os.name == 'posix'): return '/usr/lib/vmware-openSSL/openssl' def main(): \"\"\" Example Code", "Generate Private Key and Public Keys First cmd = [self.GetCertToolPath(),", "of any kind, EITHER EXPRESS OR IMPLIED. See the #", "# Generate full file names for all artifacts self.__certfileName__ =", "self.__pfxFileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".pfx\") logging.debug(\"pfx file", "usage information. \"\"\" certool.py : This is the standard library", "__skipInstallParams__ = False __certfileName__ = \"\" __privateKeyFileName__ = \"\" __publicKeyFileName__", "© 2012-2016 VMware, Inc. All Rights Reserved. # # Licensed", "param script' logging.error(errString) raise Exception(errString) logging.debug('Using install param script :", "More details. If this module can be used as a", "stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if p.returncode: raise Exception('Failed to execute last cmd')", "to integrate with VMCA Certificate Generation. 
if not running under", "are not running inside the cloudVM, set VMWARE_SKIP_VISL = True", "install param script : ' + self.__vislInstall__) def GetInstallParams(self, key):", "program, include usage information. \"\"\" certool.py : This is the", "that VMCA certool needs\"\"\" INSTALL_PARAM_SYSTEM_URL_HOSTNAME = \"system.urlhostname\" INSTALL_PARAM_LDU_GUID = \"vmdir.ldu-guid\"", "RunCmd(self, args): \"\"\" Runs a given command\"\"\" logging.info('running %s' %", "Example Code Usage \"\"\" testComponent = 'sso' VmcaCertool = CerTool()", "' + self.__vislInstall__) def GetInstallParams(self, key): \"\"\" Waits on Install", "def GetPrivateKeyFileName(self): return self.__privateKeyFile__ def GetPublicKeyFileName(self): return self.__publicKeyFile__ def GetPfxFileName(self):", "def GetHostType(self): return self.__systemHosttype__ def GetPassword(self): return self.__vmcaPassword__ def GetCertDir(self):", "'sso' VmcaCertool = CerTool() VmcaCertool.GenCert(testComponent) print 'Generated a pfx file", "# # Licensed under the Apache License, Version 2.0 (the", "== 'ipv4'): # # Possible TODO : support IPv4 in", "cmd.append('--FQDN=' + self.GetHostName()) # elif (self.GetHostType() == 'ipv4'): # #", "to execute last cmd') else: return p.communicate()[0].rstrip() def GetVislParams(self): \"\"\"", "first boot to integrate with VMCA Certificate Generation. 
if not", "= False __certfileName__ = \"\" __privateKeyFileName__ = \"\" __publicKeyFileName__ =", "self.__pfxFileName__ def GenCert(self, componentName): \"\"\" Generates the Certificates in the", "componentName): \"\"\" Generates the Certificates in the Cert directory\"\"\" #", "dir) try: if not os.path.exists(dir): os.makedirs(dir) logging.debug(\"Created directory\") except OSError", "INSTALL_PARAM_SYSTEM_URL_HOSTNAME = \"system.urlhostname\" INSTALL_PARAM_LDU_GUID = \"vmdir.ldu-guid\" INSTALL_PARAM_SYSTEM_HOST_TYPE = \"system.hostname.type\" INSTALL_PARAM_PASSWORD", "names for all artifacts self.__certfileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName", "to find install param script' logging.error(errString) raise Exception(errString) logging.debug('Using install", "output = self.RunCmd(cmd) logging.debug('Install param found :' + output) return", "INSTALL_PARAM_LDU_GUID = \"vmdir.ldu-guid\" INSTALL_PARAM_SYSTEM_HOST_TYPE = \"system.hostname.type\" INSTALL_PARAM_PASSWORD = \"<PASSWORD>\" INSTALL_PARAM_CERT_DIR", "Key Name : \" + self.GetPublicKeyFileName()) self.__pfxFileName__ = \\ os.path.join(self.GetCertDir(),", "block instead of VISL namespace.\"\"\" # Find VISL Install Parameter", "Install Parameter INSTALL_PARAM_ENV_VAR = 'VMWARE_INSTALL_PARAMETER' VMWARE_SKIP_VISL = 'VMWARE_SKIP_VISL' if INSTALL_PARAM_ENV_VAR", "\"\" __vmcaCertPath__ = \"\" __skipInstallParams__ = False __certfileName__ = \"\"", "file names for all artifacts self.__certfileName__ = \\ os.path.join(self.GetCertDir(), componentName,", "the VMWARE_SKIP_VISL = True, then reads the value from the", "(os.name == 'posix'): return '/opt/vmware/bin/certool' def GetOpenSSLPath(self): if(os.name == \"nt\"):", "details. 
If this module can be used as a main", "__copyright__ = \"Copyright 2012, VMware Inc.\" __version__ = 0.1 __author__", "= \"\" __pfxFileName__ = \"\" def __init__(self): self.FindEnvParams() self.GetVislParams() def", "Cert directory\"\"\" # Generate full file names for all artifacts", "= \"Copyright 2012, VMware Inc.\" __version__ = 0.1 __author__ =", "at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "p.communicate()[0].rstrip() def GetVislParams(self): \"\"\" Waits for all VISL parameters that", "try: if not os.path.exists(dir): os.makedirs(dir) logging.debug(\"Created directory\") except OSError as", "under the License. # # Helper function that gets certificates", "= \"system.urlhostname\" INSTALL_PARAM_LDU_GUID = \"vmdir.ldu-guid\" INSTALL_PARAM_SYSTEM_HOST_TYPE = \"system.hostname.type\" INSTALL_PARAM_PASSWORD =", "= True, then reads the value from the os environment\"\"\"", ": \" + self.GetPublicKeyFileName()) self.__pfxFileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName", "logging.info(output) cmd = [self.GetCertToolPath(), '--genCIScert', '--priv=' + self.GetPrivateKeyFileName(), '--cert=' +", "in compliance with the License. You may obtain a copy", "main program, include usage information. 
\"\"\" certool.py : This is", "main(): \"\"\" Example Code Usage \"\"\" testComponent = 'sso' VmcaCertool", "this module can be used as a main program, include", "\\ os.path.join(self.GetCertDir(), componentName, componentName + \".crt\") logging.debug(\"cert File Name :", "self.__systemHosttype__ def GetPassword(self): return self.__vmcaPassword__ def GetCertDir(self): return self.__vmcaCertPath__ def", "[self.GetCertToolPath(), '--genkey', '--priv=' + self.GetPrivateKeyFileName(), '--pub=' + self.GetPublicKeyFileName()] output =", "Password : %s' % VmcaCertool.GetPassword() if __name__ == \"__main__\": main()", "certificates output = self.RunCmd(cmd) logging.info(output) # TODO : Replace this", "componentName + \".pfx\") logging.debug(\"pfx file Name : \" + self.GetPfxFileName())", "logging.error(errString) raise Exception(errString) logging.debug('Using install param script : ' +", "[self.__vislInstall__, '-d', key] output = self.RunCmd(cmd) logging.debug('Install param found :'", "License for the specific language governing permissions and limitations #", "True system.urlhostname vmdir.ldu-guid system.hostname.type vmca.cert.password vmca.cert.dir \"\"\" __copyright__ = \"Copyright", "Code Usage \"\"\" testComponent = 'sso' VmcaCertool = CerTool() VmcaCertool.GenCert(testComponent)", "support IPv4 in certificates # elif (self.GetHostType() == 'ipv6'): #", "an “AS IS” BASIS, without # warranties or conditions of", "as e: raise Exception(\"I/O error({0}): {1}\".format(e.errno, e.strerror)) # Generate Private", "param) return param else: raise Exception('Requested Value not found in", "Waits for all VISL parameters that VMCA certool needs\"\"\" INSTALL_PARAM_SYSTEM_URL_HOSTNAME", "Environment parameters. 
if you are not running inside the cloudVM,", "self.__publicKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".pub\") logging.debug(\"Public Key", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "that into the certificate if (self.GetHostType() == 'fqdn'): cmd.append('--FQDN=' +", "by the # appropriate Script self.__systemUrlHostname__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME) self.__systemHosttype__", "= \"vmca.cert.dir\" # Please note that each of this is", "(the “License”); you may not # use this file except", "first boot to finish. discardldu = self.GetInstallParams(INSTALL_PARAM_LDU_GUID) def GetCertToolPath(self): \"\"\"returns", "# # Copyright © 2012-2016 VMware, Inc. All Rights Reserved.", "\\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME) self.__systemHosttype__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE) self.__vmcaPassword__ = \\ self.GetInstallParams(INSTALL_PARAM_PASSWORD)", "VmcaCertool = CerTool() VmcaCertool.GenCert(testComponent) print 'Generated a pfx file :", "\"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/OpenSSL/openssl.exe') elif (os.name", "self.__certfileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".crt\") logging.debug(\"cert File", "populated by the # appropriate Script self.__systemUrlHostname__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME)", "(self.GetHostType() == 'ipv6'): # # Possible TODO : support IPv6", "self.GetPublicKeyFileName()) self.__pfxFileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".pfx\") logging.debug(\"pfx", "We really don't need this value, # it is a", "Rights Reserved. 
# # Licensed under the Apache License, Version", "system.urlhostname vmdir.ldu-guid system.hostname.type vmca.cert.password vmca.cert.dir \"\"\" __copyright__ = \"Copyright 2012,", "\"vmca.cert.dir\" # Please note that each of this is a", "has the following defined. VMWARE_SKIP_VISL = True system.urlhostname vmdir.ldu-guid system.hostname.type", "= [self.GetCertToolPath(), '--genkey', '--priv=' + self.GetPrivateKeyFileName(), '--pub=' + self.GetPublicKeyFileName()] output", "os.environ[INSTALL_PARAM_ENV_VAR] if VMWARE_SKIP_VISL in os.environ: skip = os.environ[VMWARE_SKIP_VISL] if (skip", "PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/OpenSSL/openssl.exe') elif (os.name ==", "all VISL parameters that VMCA certool needs\"\"\" INSTALL_PARAM_SYSTEM_URL_HOSTNAME = \"system.urlhostname\"", "(not self.__vislInstall__ and self.__skipInstallParams__ is False): errString = 'Unable to", "output else: if val in os.environ: param = os.environ[key] logging.debug('Env.", "namespace.\"\"\" # Find VISL Install Parameter INSTALL_PARAM_ENV_VAR = 'VMWARE_INSTALL_PARAMETER' VMWARE_SKIP_VISL", "not running under a cloudVM, then it is assumed that", "__version__ = 0.1 __author__ = \"VMware, Inc.\" import logging import", "are populated by the # appropriate Script self.__systemUrlHostname__ = \\", "# We really don't need this value, # it is", "VmcaCertool.GetPfxFileName() print 'Using Password : %s' % VmcaCertool.GetPassword() if __name__", "== 'ipv6'): # # Possible TODO : support IPv6 in", "the value from visl. 
Or if the VMWARE_SKIP_VISL = True,", "= \\ os.path.join(self.GetCertDir(), componentName, componentName + \".pfx\") logging.debug(\"pfx file Name", "(self.GetHostType() == 'ipv4'): # # Possible TODO : support IPv4", "componentName, '-passout', 'pass:' + self.GetPassword()] output = self.RunCmd(cmd) logging.info(output) def", "the path to certool\"\"\" #TODO : Publish Certool Path from", "INSTALL_PARAM_ENV_VAR in os.environ: self.__vislInstall__ = os.environ[INSTALL_PARAM_ENV_VAR] if VMWARE_SKIP_VISL in os.environ:", "License is distributed on an “AS IS” BASIS, without #", "used as a main program, include usage information. \"\"\" certool.py", "blocking call. # VISL will wait until these value are", "License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "self.GetInstallParams(INSTALL_PARAM_LDU_GUID) def GetCertToolPath(self): \"\"\"returns the path to certool\"\"\" #TODO :", "False __certfileName__ = \"\" __privateKeyFileName__ = \"\" __publicKeyFileName__ = \"\"", "if val in os.environ: param = os.environ[key] logging.debug('Env. 
param found", "Public Keys First cmd = [self.GetCertToolPath(), '--genkey', '--priv=' + self.GetPrivateKeyFileName(),", "\"\" __publicKeyFileName__ = \"\" __pfxFileName__ = \"\" def __init__(self): self.FindEnvParams()", "will enable this script to look for values in the", "self.GetPrivateKeyFileName()) self.__publicKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".pub\") logging.debug(\"Public", "key) def RunCmd(self, args): \"\"\" Runs a given command\"\"\" logging.info('running", "stderr=subprocess.STDOUT) if p.returncode: raise Exception('Failed to execute last cmd') else:", "def main(): \"\"\" Example Code Usage \"\"\" testComponent = 'sso'", "value from the os environment\"\"\" if (self.__skipInstallParams__ is False): cmd", "= True system.urlhostname vmdir.ldu-guid system.hostname.type vmca.cert.password vmca.cert.dir \"\"\" __copyright__ =", "distributed on an “AS IS” BASIS, without # warranties or", ": Replace this with certool PKCS12 capabilities cmd = [self.GetOpenSSLPath(),", "[self.GetCertToolPath(), '--genCIScert', '--priv=' + self.GetPrivateKeyFileName(), '--cert=' + self.GetCertFileName(), '--Name=' +", "know the host name, put that into the certificate if", "VMCA certool needs\"\"\" INSTALL_PARAM_SYSTEM_URL_HOSTNAME = \"system.urlhostname\" INSTALL_PARAM_LDU_GUID = \"vmdir.ldu-guid\" INSTALL_PARAM_SYSTEM_HOST_TYPE", ": support IPv6 in certificates output = self.RunCmd(cmd) logging.info(output) #", "logging.debug(\"Created directory\") except OSError as e: raise Exception(\"I/O error({0}): {1}\".format(e.errno,", "+ self.GetPrivateKeyFileName(), '--cert=' + self.GetCertFileName(), '--Name=' + componentName] # if", "# # Possible TODO : support IPv4 in certificates #", "appropriate Script self.__systemUrlHostname__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME) self.__systemHosttype__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE)", "GetInstallParams(self, key): \"\"\" Waits on Install 
Parameter to return the", "self.GetPassword()] output = self.RunCmd(cmd) logging.info(output) def FindEnvParams(self): \"\"\" Finds the", "= subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if p.returncode: raise Exception('Failed to execute", "Name : \" + self.GetPrivateKeyFileName()) self.__publicKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName,", "for directory # first boot to finish. discardldu = self.GetInstallParams(INSTALL_PARAM_LDU_GUID)", "the # License for the specific language governing permissions and", "will wait until these value are populated by the #", "script to look for values in the env. block instead", "environment. This will enable this script to look for values", "# # Unless required by applicable law or agreed to", "parameters. if you are not running inside the cloudVM, set", "Value not found in Env : ' + key) def", "certool needs\"\"\" INSTALL_PARAM_SYSTEM_URL_HOSTNAME = \"system.urlhostname\" INSTALL_PARAM_LDU_GUID = \"vmdir.ldu-guid\" INSTALL_PARAM_SYSTEM_HOST_TYPE =", "import subprocess class CerTool: __vislInstall__ = \"\" __systemUrlHostname__ = \"\"", "# Find VISL Install Parameter INSTALL_PARAM_ENV_VAR = 'VMWARE_INSTALL_PARAMETER' VMWARE_SKIP_VISL =", "for cloudVM/vcenterwindows first boot to integrate with VMCA Certificate Generation.", "raise Exception('Requested Value not found in Env : ' +", "wait until these value are populated by the # appropriate", "limitations # under the License. # # Helper function that", "'--cert=' + self.GetCertFileName(), '--Name=' + componentName] # if we know", "env. block instead of VISL namespace.\"\"\" # Find VISL Install", "except in compliance with the License. You may obtain a", "\"\"\" Generates the Certificates in the Cert directory\"\"\" # Generate", "directory # first boot to finish. 
discardldu = self.GetInstallParams(INSTALL_PARAM_LDU_GUID) def", "Boot if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES +", "+ '/VMware/CIS/Vmcad/certool.exe') elif (os.name == 'posix'): return '/opt/vmware/bin/certool' def GetOpenSSLPath(self):", "# More details. If this module can be used as", "found :' + output) return output else: if val in", "'/opt/vmware/bin/certool' def GetOpenSSLPath(self): if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return", "set VMWARE_SKIP_VISL = True in your environment. This will enable", "certificates from VMWare Certificate Authority # More details. If this", "\"\" def __init__(self): self.FindEnvParams() self.GetVislParams() def GetHostName(self): return self.__systemUrlHostname__ def", "a given command\"\"\" logging.info('running %s' % args) p = subprocess.Popen(args,", "logging.info(output) # TODO : Replace this with certool PKCS12 capabilities", "cmd = [self.GetCertToolPath(), '--genkey', '--priv=' + self.GetPrivateKeyFileName(), '--pub=' + self.GetPublicKeyFileName()]", "the os environment\"\"\" if (self.__skipInstallParams__ is False): cmd = [self.__vislInstall__,", "a pfx file : %s' % VmcaCertool.GetPfxFileName() print 'Using Password", "param found : ' + param) return param else: raise", "# # Helper function that gets certificates from VMWare Certificate", "'fqdn'): cmd.append('--FQDN=' + self.GetHostName()) # elif (self.GetHostType() == 'ipv4'): #", "copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "Default Environment parameters. 
if you are not running inside the", "\" + self.GetPrivateKeyFileName()) self.__publicKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName +", "False): errString = 'Unable to find install param script' logging.error(errString)", "by applicable law or agreed to in writing, software #", "\"\"\" __copyright__ = \"Copyright 2012, VMware Inc.\" __version__ = 0.1", "can be used as a main program, include usage information.", "GetVislParams(self): \"\"\" Waits for all VISL parameters that VMCA certool", "self.__vislInstall__ and self.__skipInstallParams__ is False): errString = 'Unable to find", "= self.GetInstallParams(INSTALL_PARAM_LDU_GUID) def GetCertToolPath(self): \"\"\"returns the path to certool\"\"\" #TODO", "'--genCIScert', '--priv=' + self.GetPrivateKeyFileName(), '--cert=' + self.GetCertFileName(), '--Name=' + componentName]", "visl. Or if the VMWARE_SKIP_VISL = True, then reads the", "param found :' + output) return output else: if val", "the Default Environment parameters. if you are not running inside", "= \\ os.path.join(self.GetCertDir(), componentName, componentName + \".crt\") logging.debug(\"cert File Name", "Certificates in the Cert directory\"\"\" # Generate full file names", "output = self.RunCmd(cmd) logging.info(output) cmd = [self.GetCertToolPath(), '--genCIScert', '--priv=' +", "VISL Install Parameter INSTALL_PARAM_ENV_VAR = 'VMWARE_INSTALL_PARAMETER' VMWARE_SKIP_VISL = 'VMWARE_SKIP_VISL' if", "val in os.environ: param = os.environ[key] logging.debug('Env. param found :", "# elif (self.GetHostType() == 'ipv4'): # # Possible TODO :", "of this is a blocking call. # VISL will wait", "os.path.join(self.GetCertDir(), componentName, componentName + \".crt\") logging.debug(\"cert File Name : \"", "assumed that the OS.Environment has the following defined. VMWARE_SKIP_VISL =", "self.GetCertFileName(), '--Name=' + componentName] # if we know the host", "return the value from visl. 
Or if the VMWARE_SKIP_VISL =", "is the standard library function for cloudVM/vcenterwindows first boot to", "Env : ' + key) def RunCmd(self, args): \"\"\" Runs", "GetPassword(self): return self.__vmcaPassword__ def GetCertDir(self): return self.__vmcaCertPath__ def GetCertFileName(self): return", "under the License is distributed on an “AS IS” BASIS,", "TODO : support IPv4 in certificates # elif (self.GetHostType() ==", "gets certificates from VMWare Certificate Authority # More details. If", "in the Cert directory\"\"\" # Generate full file names for", "with VMCA Certificate Generation. if not running under a cloudVM,", "Possible TODO : support IPv4 in certificates # elif (self.GetHostType()", "os.environ: param = os.environ[key] logging.debug('Env. param found : ' +", "VmcaCertool.GenCert(testComponent) print 'Generated a pfx file : %s' % VmcaCertool.GetPfxFileName()", "'1', 'skip']): self.__skipInstallParams__ = True if (not self.__vislInstall__ and self.__skipInstallParams__", "\\ self.GetInstallParams(INSTALL_PARAM_PASSWORD) self.__vmcaCertPath__ = \\ self.GetInstallParams(INSTALL_PARAM_CERT_DIR) # We really don't", "Parameter INSTALL_PARAM_ENV_VAR = 'VMWARE_INSTALL_PARAMETER' VMWARE_SKIP_VISL = 'VMWARE_SKIP_VISL' if INSTALL_PARAM_ENV_VAR in", "return output else: if val in os.environ: param = os.environ[key]", "GetPfxFileName(self): return self.__pfxFileName__ def GenCert(self, componentName): \"\"\" Generates the Certificates", "# Helper function that gets certificates from VMWare Certificate Authority", "governing permissions and limitations # under the License. # #", "import logging import os import subprocess class CerTool: __vislInstall__ =", "Script self.__systemUrlHostname__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME) self.__systemHosttype__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_HOST_TYPE) self.__vmcaPassword__", "integrate with VMCA Certificate Generation. 
if not running under a", "the standard library function for cloudVM/vcenterwindows first boot to integrate", "(skip in ['true', 'True', 'yes', '1', 'skip']): self.__skipInstallParams__ = True", "\"\" __systemHosttype__ = \"\" __vmcaPassword__ = \"\" __vmcaCertPath__ = \"\"", "# first boot to finish. discardldu = self.GetInstallParams(INSTALL_PARAM_LDU_GUID) def GetCertToolPath(self):", "#TODO : Publish Certool Path from VMCA First Boot if(os.name", "= \"\" __vmcaCertPath__ = \"\" __skipInstallParams__ = False __certfileName__ =", "Unless required by applicable law or agreed to in writing,", "param else: raise Exception('Requested Value not found in Env :", "\".pfx\") logging.debug(\"pfx file Name : \" + self.GetPfxFileName()) dir =", "\"system.urlhostname\" INSTALL_PARAM_LDU_GUID = \"vmdir.ldu-guid\" INSTALL_PARAM_SYSTEM_HOST_TYPE = \"system.hostname.type\" INSTALL_PARAM_PASSWORD = \"<PASSWORD>\"", "Usage \"\"\" testComponent = 'sso' VmcaCertool = CerTool() VmcaCertool.GenCert(testComponent) print", "+ self.GetPassword()] output = self.RunCmd(cmd) logging.info(output) def FindEnvParams(self): \"\"\" Finds", "found : ' + param) return param else: raise Exception('Requested", "= \"<PASSWORD>\" INSTALL_PARAM_CERT_DIR = \"vmca.cert.dir\" # Please note that each", "__systemUrlHostname__ = \"\" __systemHosttype__ = \"\" __vmcaPassword__ = \"\" __vmcaCertPath__", "script' logging.error(errString) raise Exception(errString) logging.debug('Using install param script : '", "and Public Keys First cmd = [self.GetCertToolPath(), '--genkey', '--priv=' +", "is assumed that the OS.Environment has the following defined. VMWARE_SKIP_VISL", "the Apache License, Version 2.0 (the “License”); you may not", "return self.__certfileName__ def GetPrivateKeyFileName(self): return self.__privateKeyFile__ def GetPublicKeyFileName(self): return self.__publicKeyFile__", "the License. 
You may obtain a copy # of the", "applicable law or agreed to in writing, software # distributed", "# use this file except in compliance with the License.", "= self.RunCmd(cmd) logging.info(output) cmd = [self.GetCertToolPath(), '--genCIScert', '--priv=' + self.GetPrivateKeyFileName(),", "GetCertFileName(self): return self.__certfileName__ def GetPrivateKeyFileName(self): return self.__privateKeyFile__ def GetPublicKeyFileName(self): return", "\"system.hostname.type\" INSTALL_PARAM_PASSWORD = \"<PASSWORD>\" INSTALL_PARAM_CERT_DIR = \"vmca.cert.dir\" # Please note", "Apache License, Version 2.0 (the “License”); you may not #", "+ componentName] # if we know the host name, put", "2012, VMware Inc.\" __version__ = 0.1 __author__ = \"VMware, Inc.\"", "logging.debug('Install param found :' + output) return output else: if", "return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/Vmcad/certool.exe') elif (os.name == 'posix'): return '/opt/vmware/bin/certool'", "'VMWARE_INSTALL_PARAMETER' VMWARE_SKIP_VISL = 'VMWARE_SKIP_VISL' if INSTALL_PARAM_ENV_VAR in os.environ: self.__vislInstall__ =", "= \\ os.path.join(self.GetCertDir(), componentName, componentName + \".priv\") logging.debug(\"Private Key Name", "VISL parameters that VMCA certool needs\"\"\" INSTALL_PARAM_SYSTEM_URL_HOSTNAME = \"system.urlhostname\" INSTALL_PARAM_LDU_GUID", "VMWare Certificate Authority # More details. 
If this module can", "error({0}): {1}\".format(e.errno, e.strerror)) # Generate Private Key and Public Keys", "in writing, software # distributed under the License is distributed", "+ self.GetPrivateKeyFileName()) self.__publicKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName + \".pub\")", "output = self.RunCmd(cmd) logging.info(output) # TODO : Replace this with", "cloudVM, then it is assumed that the OS.Environment has the", "\".priv\") logging.debug(\"Private Key Name : \" + self.GetPrivateKeyFileName()) self.__publicKeyFile__ =", "if the VMWARE_SKIP_VISL = True, then reads the value from", "'-out', self.GetPfxFileName(), '-name', componentName, '-passout', 'pass:' + self.GetPassword()] output =", "Generates the Certificates in the Cert directory\"\"\" # Generate full", "that the OS.Environment has the following defined. VMWARE_SKIP_VISL = True", "for all artifacts self.__certfileName__ = \\ os.path.join(self.GetCertDir(), componentName, componentName +", "# Please note that each of this is a blocking", "distributed under the License is distributed on an “AS IS”", "\"\" __vmcaPassword__ = \"\" __vmcaCertPath__ = \"\" __skipInstallParams__ = False", "__pfxFileName__ = \"\" def __init__(self): self.FindEnvParams() self.GetVislParams() def GetHostName(self): return", "os.environ: skip = os.environ[VMWARE_SKIP_VISL] if (skip in ['true', 'True', 'yes',", "'ipv6'): # # Possible TODO : support IPv6 in certificates", "self.GetPrivateKeyFileName(), '-out', self.GetPfxFileName(), '-name', componentName, '-passout', 'pass:' + self.GetPassword()] output", "% args) p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if p.returncode: raise", "directory\"\"\" # Generate full file names for all artifacts self.__certfileName__", "Replace this with certool PKCS12 capabilities cmd = [self.GetOpenSSLPath(), 'pkcs12',", "value, # it is a technique on waiting for directory", "__systemHosttype__ = \"\" __vmcaPassword__ = \"\" 
__vmcaCertPath__ = \"\" __skipInstallParams__", "if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/OpenSSL/openssl.exe')", "if (self.GetHostType() == 'fqdn'): cmd.append('--FQDN=' + self.GetHostName()) # elif (self.GetHostType()", "value are populated by the # appropriate Script self.__systemUrlHostname__ =", "Exception(errString) logging.debug('Using install param script : ' + self.__vislInstall__) def", "self.GetPfxFileName()) dir = os.path.join(self.GetCertDir(),componentName) logging.debug(\"Target Dir : \" + dir)", "== \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/OpenSSL/openssl.exe') elif", "Possible TODO : support IPv6 in certificates output = self.RunCmd(cmd)", "on waiting for directory # first boot to finish. discardldu", "= \"\" __systemUrlHostname__ = \"\" __systemHosttype__ = \"\" __vmcaPassword__ =", "compliance with the License. You may obtain a copy #", "cmd = [self.__vislInstall__, '-d', key] output = self.RunCmd(cmd) logging.debug('Install param", "+ param) return param else: raise Exception('Requested Value not found", "script : ' + self.__vislInstall__) def GetInstallParams(self, key): \"\"\" Waits", "= [self.GetCertToolPath(), '--genCIScert', '--priv=' + self.GetPrivateKeyFileName(), '--cert=' + self.GetCertFileName(), '--Name='", "License. # # Helper function that gets certificates from VMWare", "== 'posix'): return '/opt/vmware/bin/certool' def GetOpenSSLPath(self): if(os.name == \"nt\"): PROGRAM_FILES", "self.GetCertFileName(), '-inkey', self.GetPrivateKeyFileName(), '-out', self.GetPfxFileName(), '-name', componentName, '-passout', 'pass:' +", "capabilities cmd = [self.GetOpenSSLPath(), 'pkcs12', '-export', '-in', self.GetCertFileName(), '-inkey', self.GetPrivateKeyFileName(),", ": \" + self.GetPrivateKeyFileName()) self.__publicKeyFile__ = \\ os.path.join(self.GetCertDir(), componentName, componentName", "IMPLIED. 
See the # License for the specific language governing", "' + key) def RunCmd(self, args): \"\"\" Runs a given", "certool\"\"\" #TODO : Publish Certool Path from VMCA First Boot", "2.0 (the “License”); you may not # use this file", "you are not running inside the cloudVM, set VMWARE_SKIP_VISL =", "return '/usr/lib/vmware-openSSL/openssl' def main(): \"\"\" Example Code Usage \"\"\" testComponent", "# of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "'--genkey', '--priv=' + self.GetPrivateKeyFileName(), '--pub=' + self.GetPublicKeyFileName()] output = self.RunCmd(cmd)", "os import subprocess class CerTool: __vislInstall__ = \"\" __systemUrlHostname__ =", "CerTool() VmcaCertool.GenCert(testComponent) print 'Generated a pfx file : %s' %", "= \"VMware, Inc.\" import logging import os import subprocess class", "# License for the specific language governing permissions and limitations", "= os.environ[VMWARE_SKIP_VISL] if (skip in ['true', 'True', 'yes', '1', 'skip']):", "else: raise Exception('Requested Value not found in Env : '", "'pkcs12', '-export', '-in', self.GetCertFileName(), '-inkey', self.GetPrivateKeyFileName(), '-out', self.GetPfxFileName(), '-name', componentName,", "args) p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if p.returncode: raise Exception('Failed", "to finish. 
discardldu = self.GetInstallParams(INSTALL_PARAM_LDU_GUID) def GetCertToolPath(self): \"\"\"returns the path", "self.__skipInstallParams__ = True if (not self.__vislInstall__ and self.__skipInstallParams__ is False):", "componentName] # if we know the host name, put that", "def GetPfxFileName(self): return self.__pfxFileName__ def GenCert(self, componentName): \"\"\" Generates the", "Key and Public Keys First cmd = [self.GetCertToolPath(), '--genkey', '--priv='", "technique on waiting for directory # first boot to finish.", "CerTool: __vislInstall__ = \"\" __systemUrlHostname__ = \"\" __systemHosttype__ = \"\"", "TODO : support IPv6 in certificates output = self.RunCmd(cmd) logging.info(output)", ": %s' % VmcaCertool.GetPfxFileName() print 'Using Password : %s' %", "self.RunCmd(cmd) logging.debug('Install param found :' + output) return output else:", "Copyright © 2012-2016 VMware, Inc. All Rights Reserved. # #", "self.GetPrivateKeyFileName(), '--cert=' + self.GetCertFileName(), '--Name=' + componentName] # if we", "in Env : ' + key) def RunCmd(self, args): \"\"\"", ": ' + param) return param else: raise Exception('Requested Value", "or conditions of any kind, EITHER EXPRESS OR IMPLIED. See", "'--Name=' + componentName] # if we know the host name,", "vmca.cert.dir \"\"\" __copyright__ = \"Copyright 2012, VMware Inc.\" __version__ =", "if (not self.__vislInstall__ and self.__skipInstallParams__ is False): errString = 'Unable", "== 'fqdn'): cmd.append('--FQDN=' + self.GetHostName()) # elif (self.GetHostType() == 'ipv4'):", "= \"system.hostname.type\" INSTALL_PARAM_PASSWORD = \"<PASSWORD>\" INSTALL_PARAM_CERT_DIR = \"vmca.cert.dir\" # Please", "standard library function for cloudVM/vcenterwindows first boot to integrate with", "language governing permissions and limitations # under the License. 
#", "+ \".crt\") logging.debug(\"cert File Name : \" + self.GetCertFileName()) self.__privateKeyFile__", "+ self.GetCertFileName(), '--Name=' + componentName] # if we know the", "that gets certificates from VMWare Certificate Authority # More details.", "+ dir) try: if not os.path.exists(dir): os.makedirs(dir) logging.debug(\"Created directory\") except", "'posix'): return '/usr/lib/vmware-openSSL/openssl' def main(): \"\"\" Example Code Usage \"\"\"", "that each of this is a blocking call. # VISL", "self.GetInstallParams(INSTALL_PARAM_CERT_DIR) # We really don't need this value, # it", "elif (os.name == 'posix'): return '/usr/lib/vmware-openSSL/openssl' def main(): \"\"\" Example", "# Possible TODO : support IPv6 in certificates output =", "You may obtain a copy # of the License at", "self.RunCmd(cmd) logging.info(output) # TODO : Replace this with certool PKCS12", "Finds the Default Environment parameters. if you are not running", "on Install Parameter to return the value from visl. Or", "really don't need this value, # it is a technique", "return '/opt/vmware/bin/certool' def GetOpenSSLPath(self): if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES']", "function for cloudVM/vcenterwindows first boot to integrate with VMCA Certificate", "and self.__skipInstallParams__ is False): errString = 'Unable to find install", "PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/Vmcad/certool.exe') elif (os.name ==", "os.environ[key] logging.debug('Env. param found : ' + param) return param", "discardldu = self.GetInstallParams(INSTALL_PARAM_LDU_GUID) def GetCertToolPath(self): \"\"\"returns the path to certool\"\"\"", "= self.RunCmd(cmd) logging.debug('Install param found :' + output) return output", "in certificates # elif (self.GetHostType() == 'ipv6'): # # Possible", "print 'Generated a pfx file : %s' % VmcaCertool.GetPfxFileName() print", "param = os.environ[key] logging.debug('Env. 
param found : ' + param)", "IPv4 in certificates # elif (self.GetHostType() == 'ipv6'): # #", "cmd = [self.GetCertToolPath(), '--genCIScert', '--priv=' + self.GetPrivateKeyFileName(), '--cert=' + self.GetCertFileName(),", "Path from VMCA First Boot if(os.name == \"nt\"): PROGRAM_FILES =", "file : %s' % VmcaCertool.GetPfxFileName() print 'Using Password : %s'", "p.returncode: raise Exception('Failed to execute last cmd') else: return p.communicate()[0].rstrip()", "for the specific language governing permissions and limitations # under", "= True in your environment. This will enable this script", "need this value, # it is a technique on waiting", "self.__vmcaPassword__ def GetCertDir(self): return self.__vmcaCertPath__ def GetCertFileName(self): return self.__certfileName__ def", "os.environ: self.__vislInstall__ = os.environ[INSTALL_PARAM_ENV_VAR] if VMWARE_SKIP_VISL in os.environ: skip =", "PKCS12 capabilities cmd = [self.GetOpenSSLPath(), 'pkcs12', '-export', '-in', self.GetCertFileName(), '-inkey',", "in ['true', 'True', 'yes', '1', 'skip']): self.__skipInstallParams__ = True if", "# TODO : Replace this with certool PKCS12 capabilities cmd", "def GetVislParams(self): \"\"\" Waits for all VISL parameters that VMCA", "# appropriate Script self.__systemUrlHostname__ = \\ self.GetInstallParams(INSTALL_PARAM_SYSTEM_URL_HOSTNAME) self.__systemHosttype__ = \\", "= os.path.join(self.GetCertDir(),componentName) logging.debug(\"Target Dir : \" + dir) try: if", "OR IMPLIED. 
See the # License for the specific language", "the host name, put that into the certificate if (self.GetHostType()", "= \\ self.GetInstallParams(INSTALL_PARAM_CERT_DIR) # We really don't need this value,", "os.path.normpath(PROGRAM_FILES + '/VMware/CIS/OpenSSL/openssl.exe') elif (os.name == 'posix'): return '/usr/lib/vmware-openSSL/openssl' def", "a cloudVM, then it is assumed that the OS.Environment has", "'True', 'yes', '1', 'skip']): self.__skipInstallParams__ = True if (not self.__vislInstall__", "+ '/VMware/CIS/OpenSSL/openssl.exe') elif (os.name == 'posix'): return '/usr/lib/vmware-openSSL/openssl' def main():", "IPv6 in certificates output = self.RunCmd(cmd) logging.info(output) # TODO :", "First Boot if(os.name == \"nt\"): PROGRAM_FILES = os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES", "print 'Using Password : %s' % VmcaCertool.GetPassword() if __name__ ==", "value from visl. Or if the VMWARE_SKIP_VISL = True, then", "= \"\" __privateKeyFileName__ = \"\" __publicKeyFileName__ = \"\" __pfxFileName__ =", "'ipv4'): # # Possible TODO : support IPv4 in certificates", "def GenCert(self, componentName): \"\"\" Generates the Certificates in the Cert", "in your environment. 
This will enable this script to look", "it is assumed that the OS.Environment has the following defined.", "'--priv=' + self.GetPrivateKeyFileName(), '--cert=' + self.GetCertFileName(), '--Name=' + componentName] #", "\"\"\" testComponent = 'sso' VmcaCertool = CerTool() VmcaCertool.GenCert(testComponent) print 'Generated", "logging.debug(\"Public Key Name : \" + self.GetPublicKeyFileName()) self.__pfxFileName__ = \\", "if INSTALL_PARAM_ENV_VAR in os.environ: self.__vislInstall__ = os.environ[INSTALL_PARAM_ENV_VAR] if VMWARE_SKIP_VISL in", "= self.RunCmd(cmd) logging.info(output) def FindEnvParams(self): \"\"\" Finds the Default Environment", "# Unless required by applicable law or agreed to in", "'VMWARE_SKIP_VISL' if INSTALL_PARAM_ENV_VAR in os.environ: self.__vislInstall__ = os.environ[INSTALL_PARAM_ENV_VAR] if VMWARE_SKIP_VISL", "not os.path.exists(dir): os.makedirs(dir) logging.debug(\"Created directory\") except OSError as e: raise", "'-inkey', self.GetPrivateKeyFileName(), '-out', self.GetPfxFileName(), '-name', componentName, '-passout', 'pass:' + self.GetPassword()]", "VMWARE_SKIP_VISL in os.environ: skip = os.environ[VMWARE_SKIP_VISL] if (skip in ['true',", "EITHER EXPRESS OR IMPLIED. See the # License for the", "\\ os.path.join(self.GetCertDir(), componentName, componentName + \".pub\") logging.debug(\"Public Key Name :", "“AS IS” BASIS, without # warranties or conditions of any", "VMWARE_SKIP_VISL = True system.urlhostname vmdir.ldu-guid system.hostname.type vmca.cert.password vmca.cert.dir \"\"\" __copyright__", "is False): errString = 'Unable to find install param script'", "VMWARE_SKIP_VISL = True in your environment. 
This will enable this", "= True if (not self.__vislInstall__ and self.__skipInstallParams__ is False): errString", "Runs a given command\"\"\" logging.info('running %s' % args) p =", "then it is assumed that the OS.Environment has the following", "the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "return self.__publicKeyFile__ def GetPfxFileName(self): return self.__pfxFileName__ def GenCert(self, componentName): \"\"\"", "+ self.GetPfxFileName()) dir = os.path.join(self.GetCertDir(),componentName) logging.debug(\"Target Dir : \" +", "path to certool\"\"\" #TODO : Publish Certool Path from VMCA", "os.environ['PROGRAMFILES'] return os.path.normpath(PROGRAM_FILES + '/VMware/CIS/Vmcad/certool.exe') elif (os.name == 'posix'): return", "logging.debug(\"pfx file Name : \" + self.GetPfxFileName()) dir = os.path.join(self.GetCertDir(),componentName)", "logging.debug(\"Private Key Name : \" + self.GetPrivateKeyFileName()) self.__publicKeyFile__ = \\", "if (self.__skipInstallParams__ is False): cmd = [self.__vislInstall__, '-d', key] output", "if VMWARE_SKIP_VISL in os.environ: skip = os.environ[VMWARE_SKIP_VISL] if (skip in", "'/VMware/CIS/Vmcad/certool.exe') elif (os.name == 'posix'): return '/opt/vmware/bin/certool' def GetOpenSSLPath(self): if(os.name", "return self.__systemHosttype__ def GetPassword(self): return self.__vmcaPassword__ def GetCertDir(self): return self.__vmcaCertPath__", "IS” BASIS, without # warranties or conditions of any kind,", "self.GetPfxFileName(), '-name', componentName, '-passout', 'pass:' + self.GetPassword()] output = self.RunCmd(cmd)", ":' + output) return output else: if val in os.environ:", "<gh_stars>100-1000 #!/usr/bin/env python # # Copyright © 2012-2016 VMware, Inc.", "Helper function that gets certificates from VMWare Certificate Authority #", "Inc.\" import logging import os import subprocess class CerTool: __vislInstall__" ]
[ "to the Pub/Sub topic. :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for", "notifications for create/update events of findings, assets and etc. >", "not None: pulumi.set(__self__, \"service_account\", service_account) if streaming_config is not None:", "events of findings, assets and etc. > **Note:** In order", "these accepted formats ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}}", "valid opts.id to get an existing resource') __props__ = NotificationConfigArgs.__new__(NotificationConfigArgs)", "pulumi.Output[str]: \"\"\" The resource name of this notification config, in", "etc. > **Note:** In order to use Cloud SCC resources,", "pulumi.set(__self__, \"streaming_config\", streaming_config) @property @pulumi.getter(name=\"configId\") def config_id(self) -> Optional[pulumi.Input[str]]: \"\"\"", "(max of 1024 characters). :param pulumi.Input[str] organization: The organization whose", "\"pubsub.topics.publish\" permission to publish to the Pub/Sub topic. :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]", "options to be a ResourceOptions instance') if opts.version is None:", "and not opts.urn: raise TypeError(\"Missing required property 'pubsub_topic'\") __props__.__dict__[\"pubsub_topic\"] =", "None: pulumi.set(__self__, \"description\", description) if name is not None: pulumi.set(__self__,", "raise TypeError('Expected resource options to be a ResourceOptions instance') if", "you may run into errors during resource creation. To get", "__props__.__dict__[\"streaming_config\"] = streaming_config return NotificationConfig(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"configId\") def", "organization. 
\"\"\" return pulumi.get(self, \"config_id\") @property @pulumi.getter def description(self) ->", "description) @property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Input[str]: \"\"\" This must", "service_account(self) -> Optional[pulumi.Input[str]]: \"\"\" The service account that needs \"pubsub.topics.publish\"", "Notification Config lives in. :param pulumi.Input[str] pubsub_topic: The Pub/Sub topic", "1024 characters). \"\"\" return pulumi.get(self, \"description\") @description.setter def description(self, value:", "is not None: pulumi.set(__self__, \"description\", description) if name is not", "streaming_config(self) -> pulumi.Output['outputs.NotificationConfigStreamingConfig']: \"\"\" The config for triggering streaming-based notifications.", "notification config (max of 1024 characters). \"\"\" return pulumi.get(self, \"description\")", "organization. \"\"\" return pulumi.get(self, \"config_id\") @config_id.setter def config_id(self, value: Optional[pulumi.Input[str]]):", "if description is not None: pulumi.set(__self__, \"description\", description) if name", "scc_notification = gcp.pubsub.Topic(\"sccNotification\") custom_notification_config = gcp.securitycenter.NotificationConfig(\"customNotificationConfig\", config_id=\"my-config\", organization=\"123456789\", description=\"My custom", "generated by the Pulumi Terraform Bridge (tfgen) Tool. *** #", "Get an existing NotificationConfig resource's state with the given name,", "resource options to be a ResourceOptions instance') if opts.version is", "the resulting resource. 
:param pulumi.Input[str] id: The unique provider ID", "__self__).__init__( 'gcp:securitycenter/notificationConfig:NotificationConfig', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id:", "\"\"\" return pulumi.get(self, \"organization\") @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Output[str]:", "Input properties used for looking up and filtering NotificationConfig resources.", "Security Command Center the Notification Config lives in. :param pulumi.Input[str]", "Basic ```python import pulumi import pulumi_gcp as gcp scc_notification =", "config_id=\"my-config\", organization=\"123456789\", description=\"My custom Cloud Security Command Center Finding Notification", "\"\"\" The organization whose Cloud Security Command Center the Notification", "topic to send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". \"\"\"", "@property @pulumi.getter(name=\"configId\") def config_id(self) -> Optional[pulumi.Input[str]]: \"\"\" This must be", "_utilities from . import outputs from ._inputs import * __all__", "format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def organization(self)", "is None and not opts.urn: raise TypeError(\"Missing required property 'pubsub_topic'\")", "\"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]):", "configuration to send notifications for create/update events of findings, assets", "\"name\", value) @property @pulumi.getter def organization(self) -> Optional[pulumi.Input[str]]: \"\"\" The", "and etc. > **Note:** In order to use Cloud SCC", "the Notification Config lives in. 
\"\"\" return pulumi.get(self, \"organization\") @organization.setter", "-> pulumi.Output[str]: \"\"\" The service account that needs \"pubsub.topics.publish\" permission", "was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***", "documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs) * How-to Guides * [Official Documentation](https://cloud.google.com/security-command-center/docs) ## Example Usage", "\\\"OPEN_FIREWALL\\\" AND state = \\\"ACTIVE\\\"\", )) ``` ## Import NotificationConfig", "= None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): \"\"\" A Cloud", "[API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs) * How-to Guides * [Official Documentation](https://cloud.google.com/security-command-center/docs) ## Example", "to use to populate this resource's properties. :param pulumi.ResourceOptions opts:", ":param pulumi.Input[str] config_id: This must be unique within the organization.", "by the Pulumi Terraform Bridge (tfgen) Tool. *** # ***", "def organization(self) -> pulumi.Output[str]: \"\"\" The organization whose Cloud Security", "return pulumi.get(self, \"service_account\") @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Output['outputs.NotificationConfigStreamingConfig']: \"\"\"", "of the notification config (max of 1024 characters). \"\"\" return", "organization is not None: pulumi.set(__self__, \"organization\", organization) if pubsub_topic is", "@description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @pulumi.input_type class", "Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not", "within the organization. 
\"\"\" return pulumi.get(self, \"config_id\") @config_id.setter def config_id(self,", "pulumi.Input[str]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Input['NotificationConfigStreamingConfigArgs']:", "streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): if opts is None: opts", "None): \"\"\" Input properties used for looking up and filtering", "to the Pub/Sub topic. \"\"\" return pulumi.get(self, \"service_account\") @property @pulumi.getter(name=\"streamingConfig\")", "None): \"\"\" The set of arguments for constructing a NotificationConfig", "To get more information about NotificationConfig, see: * [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs)", "\"pubsub.topics.publish\" permission to publish to the Pub/Sub topic. \"\"\" return", "args: The arguments to use to populate this resource's properties.", "Optional[pulumi.Input[str]]: \"\"\" The Pub/Sub topic to send notifications to. Its", "overload from .. import _utilities from . import outputs from", "None: pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) if service_account is not None: pulumi.set(__self__,", "= ['NotificationConfigArgs', 'NotificationConfig'] @pulumi.input_type class NotificationConfigArgs: def __init__(__self__, *, config_id:", "permission to publish to the Pub/Sub topic. \"\"\" return pulumi.get(self,", "to. Its format is \"projects/[project_id]/topics/[topic]\". 
:param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config", "_utilities.get_version() if opts.id is None: if __props__ is not None:", "**kwargs): resource_args, opts = _utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args", "pulumi.set(self, \"description\", value) @pulumi.input_type class _NotificationConfigState: def __init__(__self__, *, config_id:", "\"organization\") @organization.setter def organization(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"organization\", value) @property", "pubsub_topic) pulumi.set(__self__, \"streaming_config\", streaming_config) if description is not None: pulumi.set(__self__,", "\"\"\" return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self,", "config for triggering streaming-based notifications. Structure is documented below. :param", "pulumi.get(self, \"config_id\") @property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: \"\"\" The", "*** import warnings import pulumi import pulumi.runtime from typing import", "pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self, value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]): pulumi.set(self, \"streaming_config\", value)", "streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): \"\"\" A Cloud Security Command", "def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"name\", value) @property @pulumi.getter def", "= pubsub_topic if streaming_config is None and not opts.urn: raise", "value: pulumi.Input[str]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter def organization(self) ->", "is None: if __props__ is not None: raise TypeError('__props__ is", "pubsub_topic(self) -> pulumi.Input[str]: \"\"\" The Pub/Sub topic to send 
notifications", "resulting resource. :param pulumi.Input[str] id: The unique provider ID of", "must be enrolled in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center). Without doing so, you", "must be unique within the organization. :param pulumi.Input[str] organization: The", "Optional[pulumi.Input[str]]: \"\"\" This must be unique within the organization. \"\"\"", "to. Its format is \"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter", "\"streaming_config\", value) class NotificationConfig(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts:", "None, config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None, organization:", "config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]]", "valid when passed in combination with a valid opts.id to", "topic. :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering streaming-based notifications.", "this file was generated by the Pulumi Terraform Bridge (tfgen)", "\"organization\") @organization.setter def organization(self, value: pulumi.Input[str]): pulumi.set(self, \"organization\", value) @property", "so, you may run into errors during resource creation. To", "```python import pulumi import pulumi_gcp as gcp scc_notification = gcp.pubsub.Topic(\"sccNotification\")", "pulumi.set(__self__, \"organization\", organization) if pubsub_topic is not None: pulumi.set(__self__, \"pubsub_topic\",", "streaming-based notifications. Structure is documented below. \"\"\" ... 
@overload def", "opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _NotificationConfigState.__new__(_NotificationConfigState) __props__.__dict__[\"config_id\"] = config_id", "pulumi.set(self, \"config_id\", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: \"\"\"", "in. :param pulumi.Input[str] pubsub_topic: The Pub/Sub topic to send notifications", "of the notification config (max of 1024 characters). :param pulumi.Input[str]", "\"\"\" return pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self, value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]): pulumi.set(self,", "= None, service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] = None):", "value) @pulumi.input_type class _NotificationConfigState: def __init__(__self__, *, config_id: Optional[pulumi.Input[str]] =", "be a ResourceOptions instance') if opts.version is None: opts.version =", "resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource.", "A Cloud Security Command Center (Cloud SCC) notification configs. A", "notifications to. Its format is \"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self, \"pubsub_topic\")", "def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"streamingConfig\") def", "property 'organization'\") __props__.__dict__[\"organization\"] = organization if pubsub_topic is None and", "if name is not None: pulumi.set(__self__, \"name\", name) if organization", "@property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Output['outputs.NotificationConfigStreamingConfig']: \"\"\" The config for", "to send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". 
:param pulumi.Input[str]", "def description(self) -> pulumi.Output[Optional[str]]: \"\"\" The description of the notification", "\"\"\" return pulumi.get(self, \"config_id\") @config_id.setter def config_id(self, value: pulumi.Input[str]): pulumi.set(self,", "pulumi.get(self, \"config_id\") @config_id.setter def config_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"config_id\", value)", "\"organization\", organization) if pubsub_topic is not None: pulumi.set(__self__, \"pubsub_topic\", pubsub_topic)", "def config_id(self, value: pulumi.Input[str]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter def", "pulumi.set(__self__, \"name\", name) if organization is not None: pulumi.set(__self__, \"organization\",", "\"\"\" The description of the notification config (max of 1024", "name, id, and optional extra properties used to qualify the", "instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id", "if organization is None and not opts.urn: raise TypeError(\"Missing required", "config (max of 1024 characters). \"\"\" return pulumi.get(self, \"description\") @property", "None, config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None, name:", "arguments to use to populate this resource's properties. :param pulumi.ResourceOptions", "Notification Config lives in. \"\"\" return pulumi.get(self, \"organization\") @organization.setter def", "pulumi.Output['outputs.NotificationConfigStreamingConfig']: \"\"\" The config for triggering streaming-based notifications. Structure is", "\"\"\" The set of arguments for constructing a NotificationConfig resource.", "NotificationConfig resources. 
:param pulumi.Input[str] config_id: This must be unique within", "@pulumi.getter(name=\"serviceAccount\") def service_account(self) -> Optional[pulumi.Input[str]]: \"\"\" The service account that", "@property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: \"\"\" The resource name", "the configuration to send notifications for create/update events of findings,", "str resource_name: The name of the resource. :param NotificationConfigArgs args:", "[SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center). Without doing so, you may run into errors", "-> pulumi.Output[str]: \"\"\" The Pub/Sub topic to send notifications to.", "TypeError(\"Missing required property 'streaming_config'\") __props__.__dict__[\"streaming_config\"] = streaming_config __props__.__dict__[\"name\"] = None", "\"service_account\") @service_account.setter def service_account(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"service_account\", value) @property", "Optional[pulumi.ResourceOptions] = None): \"\"\" A Cloud Security Command Center (Cloud", "below. 
\"\"\" return pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self, value: pulumi.Input['NotificationConfigStreamingConfigArgs']):", "@organization.setter def organization(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\")", "value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: \"\"\" The resource", "description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @property @pulumi.getter def name(self)", "service_account: The service account that needs \"pubsub.topics.publish\" permission to publish", "\"description\", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: \"\"\" The", "description if organization is None and not opts.urn: raise TypeError(\"Missing", "str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]] =", "pulumi.set(self, \"name\", value) @property @pulumi.getter def organization(self) -> Optional[pulumi.Input[str]]: \"\"\"", "@property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Output[str]: \"\"\" The Pub/Sub topic", "config_id) if description is not None: pulumi.set(__self__, \"description\", description) if", "organization: Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]]", "name: The resource name of this notification config, in the", "\"description\", value) @pulumi.input_type class _NotificationConfigState: def __init__(__self__, *, config_id: Optional[pulumi.Input[str]]", "Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]: \"\"\" The config for triggering streaming-based notifications. 
Structure is", "streaming_config is None and not opts.urn: raise TypeError(\"Missing required property", "you're certain you know what you are doing! *** import", "coding=utf-8 # *** WARNING: this file was generated by the", "(tfgen) Tool. *** # *** Do not edit by hand", "'config_id'\") __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description if organization is", "opts.urn: raise TypeError(\"Missing required property 'streaming_config'\") __props__.__dict__[\"streaming_config\"] = streaming_config __props__.__dict__[\"name\"]", "def description(self) -> Optional[pulumi.Input[str]]: \"\"\" The description of the notification", "resources. :param pulumi.Input[str] config_id: This must be unique within the", "Security Command Center Finding Notification Configuration\", pubsub_topic=scc_notification.id, streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs( filter=\"category =", "is None and not opts.urn: raise TypeError(\"Missing required property 'organization'\")", "@pulumi.getter(name=\"configId\") def config_id(self) -> Optional[pulumi.Input[str]]: \"\"\" This must be unique", "\"projects/[project_id]/topics/[topic]\". :param pulumi.Input[str] service_account: The service account that needs \"pubsub.topics.publish\"", "pulumi.get(self, \"pubsub_topic\") @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> pulumi.Output[str]: \"\"\" The", "documented below. \"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _NotificationConfigState.__new__(_NotificationConfigState)", "combination with a valid opts.id to get an existing resource')", "from .. import _utilities from . 
import outputs from ._inputs", "opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]]", "TypeError(\"Missing required property 'pubsub_topic'\") __props__.__dict__[\"pubsub_topic\"] = pubsub_topic if streaming_config is", "if opts.version is None: opts.version = _utilities.get_version() if opts.id is", "pulumi.set(__self__, \"description\", description) if name is not None: pulumi.set(__self__, \"name\",", "default {{organization}}/{{name}} ``` :param str resource_name: The name of the", "None: pulumi.set(__self__, \"name\", name) if organization is not None: pulumi.set(__self__,", "see: * [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs) * How-to Guides * [Official Documentation](https://cloud.google.com/security-command-center/docs)", "below. \"\"\" ... @overload def __init__(__self__, resource_name: str, args: NotificationConfigArgs,", "\"pubsub_topic\", pubsub_topic) pulumi.set(__self__, \"streaming_config\", streaming_config) if description is not None:", "an existing resource') __props__ = NotificationConfigArgs.__new__(NotificationConfigArgs) if config_id is None", "pubsub_topic: Optional[pulumi.Input[str]] = None, service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]]", "Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] =", ":param pulumi.Input[str] pubsub_topic: The Pub/Sub topic to send notifications to.", "How-to Guides * [Official Documentation](https://cloud.google.com/security-command-center/docs) ## Example Usage ### Scc", "config (max of 1024 characters). 
\"\"\" return pulumi.get(self, \"description\") @description.setter", "__props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description if organization is None", "to use Cloud SCC resources, your organization must be enrolled", "This must be unique within the organization. \"\"\" return pulumi.get(self,", "streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None) -> 'NotificationConfig': \"\"\" Get an existing", "pulumi.Input['NotificationConfigStreamingConfigArgs']: \"\"\" The config for triggering streaming-based notifications. Structure is", "unique within the organization. :param pulumi.Input[str] organization: The organization whose", "super(NotificationConfig, __self__).__init__( 'gcp:securitycenter/notificationConfig:NotificationConfig', resource_name, __props__, opts) @staticmethod def get(resource_name: str,", "organization(self) -> pulumi.Input[str]: \"\"\" The organization whose Cloud Security Command", "pulumi.get(self, \"service_account\") @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Output['outputs.NotificationConfigStreamingConfig']: \"\"\" The", "an existing NotificationConfig resource's state with the given name, id,", "passed in combination with a valid opts.id to get an", "config for triggering streaming-based notifications. Structure is documented below. \"\"\"", "WARNING: this file was generated by the Pulumi Terraform Bridge", "within the organization. 
:param pulumi.Input[str] description: The description of the", "SCC resources, your organization must be enrolled in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center).", "\"\"\" if config_id is not None: pulumi.set(__self__, \"config_id\", config_id) if", "pulumi.Input[str], streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'], description: Optional[pulumi.Input[str]] = None): \"\"\" The set", "(max of 1024 characters). \"\"\" return pulumi.get(self, \"description\") @description.setter def", "Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]] =", "value: Optional[pulumi.Input[str]]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) ->", "resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str]", "pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}} ``` ```sh $ pulumi import", "properties used to qualify the lookup. :param str resource_name: The", "= service_account __props__.__dict__[\"streaming_config\"] = streaming_config return NotificationConfig(resource_name, opts=opts, __props__=__props__) @property", "documented below. \"\"\" if config_id is not None: pulumi.set(__self__, \"config_id\",", "Mapping, Optional, Sequence, Union, overload from .. 
import _utilities from", "@streaming_config.setter def streaming_config(self, value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]): pulumi.set(self, \"streaming_config\", value) class NotificationConfig(pulumi.CustomResource):", "Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None) -> 'NotificationConfig': \"\"\"", "with the given name, id, and optional extra properties used", "@pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Output['outputs.NotificationConfigStreamingConfig']: \"\"\" The config for triggering", ":param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering streaming-based notifications. Structure", "pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self, value: pulumi.Input['NotificationConfigStreamingConfigArgs']): pulumi.set(self, \"streaming_config\", value)", "def name(self) -> pulumi.Output[str]: \"\"\" The resource name of this", "documented below. \"\"\" return pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self, value:", "__props__=None): \"\"\" A Cloud Security Command Center (Cloud SCC) notification", "to be a ResourceOptions instance') if opts.version is None: opts.version", "pulumi.set(__self__, \"service_account\", service_account) if streaming_config is not None: pulumi.set(__self__, \"streaming_config\",", "@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None,", "state with the given name, id, and optional extra properties", "gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}} ``` :param str resource_name: The name of", "name of the resulting resource. 
:param pulumi.Input[str] id: The unique", "Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]] = None, service_account: Optional[pulumi.Input[str]] =", "and optional extra properties used to qualify the lookup. :param", "return pulumi.get(self, \"config_id\") @property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: \"\"\"", "value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Input[str]: \"\"\" The Pub/Sub", "existing NotificationConfig resource's state with the given name, id, and", "description(self) -> Optional[pulumi.Input[str]]: \"\"\" The description of the notification config", "\"organization\") @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Output[str]: \"\"\" The Pub/Sub", "import Any, Mapping, Optional, Sequence, Union, overload from .. import", "__props__.__dict__[\"pubsub_topic\"] = pubsub_topic if streaming_config is None and not opts.urn:", "the Pulumi Terraform Bridge (tfgen) Tool. 
*** # *** Do", "def streaming_config(self) -> pulumi.Input['NotificationConfigStreamingConfigArgs']: \"\"\" The config for triggering streaming-based", "$ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}} ``` ```sh $ pulumi", "pulumi.Input[str], organization: pulumi.Input[str], pubsub_topic: pulumi.Input[str], streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'], description: Optional[pulumi.Input[str]] =", "def __init__(__self__, *, config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] =", "value) @property @pulumi.getter def organization(self) -> pulumi.Input[str]: \"\"\" The organization", "is None and not opts.urn: raise TypeError(\"Missing required property 'config_id'\")", "organization=\"123456789\", description=\"My custom Cloud Security Command Center Finding Notification Configuration\",", "streaming_config(self) -> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]: \"\"\" The config for triggering streaming-based notifications.", "organization. :param pulumi.Input[str] organization: The organization whose Cloud Security Command", "is not None: pulumi.set(__self__, \"service_account\", service_account) if streaming_config is not", "constructing a NotificationConfig resource. :param pulumi.Input[str] config_id: This must be", "@pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: \"\"\" The description of the", "@pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]: \"\"\" The config for triggering", "'NotificationConfig'] @pulumi.input_type class NotificationConfigArgs: def __init__(__self__, *, config_id: pulumi.Input[str], organization:", "edit by hand unless you're certain you know what you", "in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. 
:param pulumi.Input[str] organization: The organization whose", "not None: pulumi.set(__self__, \"organization\", organization) if pubsub_topic is not None:", "pulumi.set(self, \"service_account\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]: \"\"\"", "Optional[pulumi.Input[str]]): pulumi.set(self, \"service_account\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]:", "None: pulumi.set(__self__, \"streaming_config\", streaming_config) @property @pulumi.getter(name=\"configId\") def config_id(self) -> Optional[pulumi.Input[str]]:", "import gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}} ``` ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig", "**kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else:", "get more information about NotificationConfig, see: * [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs) *", "Finding Notification Configuration\", pubsub_topic=scc_notification.id, streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs( filter=\"category = \\\"OPEN_FIREWALL\\\" AND state", "## Import NotificationConfig can be imported using any of these", "The organization whose Cloud Security Command Center the Notification Config", "return pulumi.get(self, \"config_id\") @config_id.setter def config_id(self, value: pulumi.Input[str]): pulumi.set(self, \"config_id\",", "service_account) if streaming_config is not None: pulumi.set(__self__, \"streaming_config\", streaming_config) @property", "\"config_id\", config_id) if description is not None: pulumi.set(__self__, \"description\", description)", 
"@config_id.setter def config_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter", "resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options", "NotificationConfigArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\" A Cloud Security Command", "permission to publish to the Pub/Sub topic. :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config:", "streaming_config: The config for triggering streaming-based notifications. Structure is documented", "Center (Cloud SCC) notification configs. A notification config is a", "### Scc Notification Config Basic ```python import pulumi import pulumi_gcp", "to publish to the Pub/Sub topic. :param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The", "streaming_config(self, value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]): pulumi.set(self, \"streaming_config\", value) class NotificationConfig(pulumi.CustomResource): @overload def", "streaming_config return NotificationConfig(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"configId\") def config_id(self) ->", "* How-to Guides * [Official Documentation](https://cloud.google.com/security-command-center/docs) ## Example Usage ###", "= _utilities.get_version() if opts.id is None: if __props__ is not", "None: if __props__ is not None: raise TypeError('__props__ is only", "__init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions,", "file was generated by the Pulumi Terraform Bridge (tfgen) Tool.", "description __props__.__dict__[\"name\"] = name __props__.__dict__[\"organization\"] = organization __props__.__dict__[\"pubsub_topic\"] = pubsub_topic", "pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> 
pulumi.Input[str]: \"\"\"", "None: raise TypeError('__props__ is only valid when passed in combination", "is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs)", "resource. \"\"\" ... def __init__(__self__, resource_name: str, *args, **kwargs): resource_args,", "\"pubsub.topics.publish\" permission to publish to the Pub/Sub topic. :param pulumi.Input['NotificationConfigStreamingConfigArgs']", "to. Its format is \"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self, \"pubsub_topic\") @property", "pulumi.get(self, \"service_account\") @service_account.setter def service_account(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"service_account\", value)", "__props__.__dict__[\"organization\"] = organization if pubsub_topic is None and not opts.urn:", "streaming-based notifications. Structure is documented below. \"\"\" return pulumi.get(self, \"streaming_config\")", "None: opts.version = _utilities.get_version() if opts.id is None: if __props__", ":param str resource_name: The name of the resource. :param pulumi.ResourceOptions", "str, opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]] = None, description:", "= pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options", "of the resource. :param pulumi.ResourceOptions opts: Options for the resource.", "arguments for constructing a NotificationConfig resource. 
:param pulumi.Input[str] config_id: This", "__props__=None): if opts is None: opts = pulumi.ResourceOptions() if not", "property 'config_id'\") __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description if organization", "property 'streaming_config'\") __props__.__dict__[\"streaming_config\"] = streaming_config __props__.__dict__[\"name\"] = None __props__.__dict__[\"service_account\"] =", "service_account is not None: pulumi.set(__self__, \"service_account\", service_account) if streaming_config is", "@overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, config_id:", "pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) if service_account is not None: pulumi.set(__self__, \"service_account\",", "to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for", "\"projects/[project_id]/topics/[topic]\". :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering streaming-based notifications.", "value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]: \"\"\" The config", "is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[str] service_account: The service account that needs", "return pulumi.get(self, \"description\") @property @pulumi.getter def name(self) -> pulumi.Output[str]: \"\"\"", "Structure is documented below. :param pulumi.Input[str] description: The description of", "is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering streaming-based", "Structure is documented below. 
\"\"\" return pulumi.get(self, \"streaming_config\") @streaming_config.setter def", "is not None: pulumi.set(__self__, \"description\", description) @property @pulumi.getter(name=\"configId\") def config_id(self)", "pulumi.get(self, \"organization\") @organization.setter def organization(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"organization\", value)", "send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config:", "import _utilities from . import outputs from ._inputs import *", "pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence,", "\"\"\" This must be unique within the organization. \"\"\" return", "\"service_account\", service_account) if streaming_config is not None: pulumi.set(__self__, \"streaming_config\", streaming_config)", "pulumi.set(__self__, \"config_id\", config_id) if description is not None: pulumi.set(__self__, \"description\",", "None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if", "raise TypeError(\"Missing required property 'streaming_config'\") __props__.__dict__[\"streaming_config\"] = streaming_config __props__.__dict__[\"name\"] =", "None: pulumi.set(__self__, \"config_id\", config_id) if description is not None: pulumi.set(__self__,", "def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]]", "that contains the configuration to send notifications for create/update events", "return pulumi.get(self, \"pubsub_topic\") @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> pulumi.Output[str]: \"\"\"", "to. Its format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[str] service_account: The service", "if __props__ is not None: raise TypeError('__props__ is only valid", "of 1024 characters). 
\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def", "Cloud Security Command Center the Notification Config lives in. \"\"\"", "pubsub_topic) if service_account is not None: pulumi.set(__self__, \"service_account\", service_account) if", "lives in. \"\"\" return pulumi.get(self, \"organization\") @organization.setter def organization(self, value:", "\"\"\" The service account that needs \"pubsub.topics.publish\" permission to publish", "@property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Output[str]: \"\"\" This must be", "__props__=__props__) @property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Output[str]: \"\"\" This must", "-> pulumi.Input['NotificationConfigStreamingConfigArgs']: \"\"\" The config for triggering streaming-based notifications. Structure", "lives in. :param pulumi.Input[str] pubsub_topic: The Pub/Sub topic to send", "config_id(self) -> Optional[pulumi.Input[str]]: \"\"\" This must be unique within the", "def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(NotificationConfigArgs,", "opts.urn: raise TypeError(\"Missing required property 'organization'\") __props__.__dict__[\"organization\"] = organization if", "not None: pulumi.set(__self__, \"name\", name) if organization is not None:", "pubsub_topic(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"serviceAccount\") def service_account(self)", "= None, config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None,", ". import outputs from ._inputs import * __all__ = ['NotificationConfigArgs',", "Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] =", "must be unique within the organization. 
:param pulumi.Input[str] description: The", "pulumi.Output[str]: \"\"\" The organization whose Cloud Security Command Center the", "pulumi.set(self, \"streaming_config\", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: \"\"\"", "@pulumi.input_type class _NotificationConfigState: def __init__(__self__, *, config_id: Optional[pulumi.Input[str]] = None,", "**resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts:", "the Pub/Sub topic. :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering", "is not None: pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) if service_account is not", "__all__ = ['NotificationConfigArgs', 'NotificationConfig'] @pulumi.input_type class NotificationConfigArgs: def __init__(__self__, *,", "Optional[pulumi.Input[str]]: \"\"\" The description of the notification config (max of", "@streaming_config.setter def streaming_config(self, value: pulumi.Input['NotificationConfigStreamingConfigArgs']): pulumi.set(self, \"streaming_config\", value) @property @pulumi.getter", "-> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]: \"\"\" The config for triggering streaming-based notifications. Structure", "resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]] = None,", "1024 characters). :param pulumi.Input[str] name: The resource name of this", "Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] = None): \"\"\" Input properties", "not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a", "opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str,", "lives in. 
\"\"\" return pulumi.get(self, \"organization\") @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self)", "\"config_id\", config_id) pulumi.set(__self__, \"organization\", organization) pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) pulumi.set(__self__, \"streaming_config\",", "opts: Options for the resource. \"\"\" ... def __init__(__self__, resource_name:", "Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): \"\"\" A", "The resource name of this notification config, in the format", "organization __props__.__dict__[\"pubsub_topic\"] = pubsub_topic __props__.__dict__[\"service_account\"] = service_account __props__.__dict__[\"streaming_config\"] = streaming_config", "that needs \"pubsub.topics.publish\" permission to publish to the Pub/Sub topic.", "\"name\") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"name\", value) @property", "import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union,", "value) @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> Optional[pulumi.Input[str]]: \"\"\" The service", "@pulumi.getter def organization(self) -> pulumi.Input[str]: \"\"\" The organization whose Cloud", "return pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self, value: pulumi.Input['NotificationConfigStreamingConfigArgs']): pulumi.set(self, \"streaming_config\",", "the given name, id, and optional extra properties used to", "extra properties used to qualify the lookup. :param str resource_name:", "to send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". \"\"\" return", "to the Pub/Sub topic. \"\"\" return pulumi.get(self, \"service_account\") @service_account.setter def", "\"projects/[project_id]/topics/[topic]\". 
\"\"\" return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: Optional[pulumi.Input[str]]):", "this notification config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. :param pulumi.Input[str] organization:", "opts: Optional[pulumi.ResourceOptions] = None): \"\"\" A Cloud Security Command Center", "@pulumi.getter def organization(self) -> pulumi.Output[str]: \"\"\" The organization whose Cloud", "up and filtering NotificationConfig resources. :param pulumi.Input[str] config_id: This must", "Config lives in. \"\"\" return pulumi.get(self, \"organization\") @property @pulumi.getter(name=\"pubsubTopic\") def", "str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions, *args, **kwargs)", "needs \"pubsub.topics.publish\" permission to publish to the Pub/Sub topic. \"\"\"", "of findings, assets and etc. > **Note:** In order to", "= _NotificationConfigState.__new__(_NotificationConfigState) __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description __props__.__dict__[\"name\"] =", "resource_name: The unique name of the resulting resource. :param pulumi.Input[str]", "only valid when passed in combination with a valid opts.id", "* __all__ = ['NotificationConfigArgs', 'NotificationConfig'] @pulumi.input_type class NotificationConfigArgs: def __init__(__self__,", "assets and etc. 
> **Note:** In order to use Cloud", "warnings import pulumi import pulumi.runtime from typing import Any, Mapping,", "@pulumi.input_type class NotificationConfigArgs: def __init__(__self__, *, config_id: pulumi.Input[str], organization: pulumi.Input[str],", "-> Optional[pulumi.Input[str]]: \"\"\" The resource name of this notification config,", "about NotificationConfig, see: * [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs) * How-to Guides *", "pulumi.get(self, \"name\") @property @pulumi.getter def organization(self) -> pulumi.Output[str]: \"\"\" The", "send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self,", "name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"name\", value) @property @pulumi.getter def organization(self)", "you know what you are doing! *** import warnings import", "\"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self, \"pubsub_topic\") @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) ->", "def pubsub_topic(self) -> Optional[pulumi.Input[str]]: \"\"\" The Pub/Sub topic to send", "None, pubsub_topic: Optional[pulumi.Input[str]] = None, service_account: Optional[pulumi.Input[str]] = None, streaming_config:", "Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]] =", "Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): if opts", "config_id __props__.__dict__[\"description\"] = description __props__.__dict__[\"name\"] = name __props__.__dict__[\"organization\"] = organization", "__self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name:", "for triggering 
streaming-based notifications. Structure is documented below. \"\"\" if", "'NotificationConfig': \"\"\" Get an existing NotificationConfig resource's state with the", "resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args,", "contains the configuration to send notifications for create/update events of", "name of the resource. :param pulumi.ResourceOptions opts: Options for the", "pulumi.Input[str]: \"\"\" The organization whose Cloud Security Command Center the", "``` ## Import NotificationConfig can be imported using any of", "if description is not None: pulumi.set(__self__, \"description\", description) @property @pulumi.getter(name=\"configId\")", "pulumi.ResourceOptions opts: Options for the resource. \"\"\" ... def __init__(__self__,", "*** Do not edit by hand unless you're certain you", "TypeError(\"Missing required property 'config_id'\") __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description", "the resource. :param NotificationConfigArgs args: The arguments to use to", "opts.urn: raise TypeError(\"Missing required property 'config_id'\") __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"]", "._inputs import * __all__ = ['NotificationConfigArgs', 'NotificationConfig'] @pulumi.input_type class NotificationConfigArgs:", "the Notification Config lives in. :param pulumi.Input[str] pubsub_topic: The Pub/Sub", "Notification Config Basic ```python import pulumi import pulumi_gcp as gcp", "None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): if opts is None:", "['NotificationConfigArgs', 'NotificationConfig'] @pulumi.input_type class NotificationConfigArgs: def __init__(__self__, *, config_id: pulumi.Input[str],", "populate this resource's properties. 
:param pulumi.ResourceOptions opts: Options for the", "pulumi.Input[str] description: The description of the notification config (max of", "= config_id __props__.__dict__[\"description\"] = description __props__.__dict__[\"name\"] = name __props__.__dict__[\"organization\"] =", "doing! *** import warnings import pulumi import pulumi.runtime from typing", "the notification config (max of 1024 characters). :param pulumi.Input[str] name:", "config_id: pulumi.Input[str], organization: pulumi.Input[str], pubsub_topic: pulumi.Input[str], streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'], description: Optional[pulumi.Input[str]]", "filtering NotificationConfig resources. :param pulumi.Input[str] config_id: This must be unique", ")) ``` ## Import NotificationConfig can be imported using any", "-> pulumi.Output[Optional[str]]: \"\"\" The description of the notification config (max", "Cloud Security Command Center Finding Notification Configuration\", pubsub_topic=scc_notification.id, streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs( filter=\"category", "typing import Any, Mapping, Optional, Sequence, Union, overload from ..", "-> pulumi.Input[str]: \"\"\" This must be unique within the organization.", "Notification Configuration\", pubsub_topic=scc_notification.id, streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs( filter=\"category = \\\"OPEN_FIREWALL\\\" AND state =", "to send notifications for create/update events of findings, assets and", "\"name\") @property @pulumi.getter def organization(self) -> pulumi.Output[str]: \"\"\" The organization", "\"\"\" return pulumi.get(self, \"service_account\") @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Output['outputs.NotificationConfigStreamingConfig']:", "def organization(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def", "-> 
pulumi.Output[str]: \"\"\" The resource name of this notification config,", "Options for the resource. \"\"\" ... def __init__(__self__, resource_name: str,", "the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. :param pulumi.Input[str] organization: The organization whose Cloud", "def pubsub_topic(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"serviceAccount\") def", "pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Input['NotificationConfigStreamingConfigArgs']: \"\"\"", "__props__.__dict__[\"description\"] = description __props__.__dict__[\"name\"] = name __props__.__dict__[\"organization\"] = organization __props__.__dict__[\"pubsub_topic\"]", "triggering streaming-based notifications. Structure is documented below. \"\"\" return pulumi.get(self,", "\"description\") @property @pulumi.getter def name(self) -> pulumi.Output[str]: \"\"\" The resource", "\"\"\" pulumi.set(__self__, \"config_id\", config_id) pulumi.set(__self__, \"organization\", organization) pulumi.set(__self__, \"pubsub_topic\", pubsub_topic)", "opts.id to get an existing resource') __props__ = NotificationConfigArgs.__new__(NotificationConfigArgs) if", "enrolled in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center). Without doing so, you may run", "lookup. :param str resource_name: The unique name of the resulting", "return pulumi.get(self, \"name\") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"name\",", "opts.id is None: if __props__ is not None: raise TypeError('__props__", "The unique name of the resulting resource. :param pulumi.Input[str] id:", "pubsub_topic: pulumi.Input[str], streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'], description: Optional[pulumi.Input[str]] = None): \"\"\" The", "in. 
\"\"\" return pulumi.get(self, \"organization\") @organization.setter def organization(self, value: pulumi.Input[str]):", "``` :param str resource_name: The name of the resource. :param", "opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] =", "__props__.__dict__[\"name\"] = name __props__.__dict__[\"organization\"] = organization __props__.__dict__[\"pubsub_topic\"] = pubsub_topic __props__.__dict__[\"service_account\"]", "-> Optional[pulumi.Input[str]]: \"\"\" This must be unique within the organization.", "send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[str] service_account:", "is documented below. \"\"\" ... @overload def __init__(__self__, resource_name: str,", "pulumi.set(__self__, \"organization\", organization) pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) pulumi.set(__self__, \"streaming_config\", streaming_config) if", "characters). :param pulumi.Input[str] name: The resource name of this notification", "config_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter def description(self)", "return pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self, value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]): pulumi.set(self, \"streaming_config\",", "to the Pub/Sub topic. :param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for", "set of arguments for constructing a NotificationConfig resource. :param pulumi.Input[str]", "= gcp.securitycenter.NotificationConfig(\"customNotificationConfig\", config_id=\"my-config\", organization=\"123456789\", description=\"My custom Cloud Security Command Center", "@pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Input[str]: \"\"\" This must be unique", "qualify the lookup. :param str resource_name: The unique name of", "organization. 
\"\"\" return pulumi.get(self, \"config_id\") @config_id.setter def config_id(self, value: pulumi.Input[str]):", "Cloud SCC resources, your organization must be enrolled in [SCC", "import outputs from ._inputs import * __all__ = ['NotificationConfigArgs', 'NotificationConfig']", "Pub/Sub topic to send notifications to. Its format is \"projects/[project_id]/topics/[topic]\".", "creation. To get more information about NotificationConfig, see: * [API", "@organization.setter def organization(self, value: pulumi.Input[str]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\")", "characters). \"\"\" pulumi.set(__self__, \"config_id\", config_id) pulumi.set(__self__, \"organization\", organization) pulumi.set(__self__, \"pubsub_topic\",", "resource_name: The name of the resource. :param NotificationConfigArgs args: The", "\"service_account\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]: \"\"\" The", "Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): \"\"\" A Cloud Security Command Center", "= description __props__.__dict__[\"name\"] = name __props__.__dict__[\"organization\"] = organization __props__.__dict__[\"pubsub_topic\"] =", "value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Input['NotificationConfigStreamingConfigArgs']: \"\"\" The config", "is not None: pulumi.set(__self__, \"streaming_config\", streaming_config) @property @pulumi.getter(name=\"configId\") def config_id(self)", "Its format is \"projects/[project_id]/topics/[topic]\". 
:param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for", "__props__.__dict__[\"service_account\"] = None super(NotificationConfig, __self__).__init__( 'gcp:securitycenter/notificationConfig:NotificationConfig', resource_name, __props__, opts) @staticmethod", "not edit by hand unless you're certain you know what", "used for looking up and filtering NotificationConfig resources. :param pulumi.Input[str]", "\"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def organization(self) -> pulumi.Output[str]:", "given name, id, and optional extra properties used to qualify", "The service account that needs \"pubsub.topics.publish\" permission to publish to", "from . import outputs from ._inputs import * __all__ =", "pulumi.set(self, \"description\", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: \"\"\"", "@property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> Optional[pulumi.Input[str]]: \"\"\" The Pub/Sub topic", "value: pulumi.Input[str]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) ->", "def streaming_config(self) -> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]: \"\"\" The config for triggering streaming-based", "publish to the Pub/Sub topic. :param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config", "not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def", "'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self, \"name\") @name.setter def name(self, value: Optional[pulumi.Input[str]]):", "findings, assets and etc. 
> **Note:** In order to use", "\"organization\", organization) pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) pulumi.set(__self__, \"streaming_config\", streaming_config) if description", "Center the Notification Config lives in. \"\"\" return pulumi.get(self, \"organization\")", "resource that contains the configuration to send notifications for create/update", "be unique within the organization. \"\"\" return pulumi.get(self, \"config_id\") @property", "Usage ### Scc Notification Config Basic ```python import pulumi import", "pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _NotificationConfigState.__new__(_NotificationConfigState) __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] =", "used to qualify the lookup. :param str resource_name: The unique", "@name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"name\", value) @property @pulumi.getter", "you are doing! *** import warnings import pulumi import pulumi.runtime", "def pubsub_topic(self) -> pulumi.Output[str]: \"\"\" The Pub/Sub topic to send", "-> pulumi.Input[str]: \"\"\" The organization whose Cloud Security Command Center", "... @overload def __init__(__self__, resource_name: str, args: NotificationConfigArgs, opts: Optional[pulumi.ResourceOptions]", "'streaming_config'\") __props__.__dict__[\"streaming_config\"] = streaming_config __props__.__dict__[\"name\"] = None __props__.__dict__[\"service_account\"] = None", "below. \"\"\" if config_id is not None: pulumi.set(__self__, \"config_id\", config_id)", "None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None) -> 'NotificationConfig': \"\"\" Get an", "notifications. Structure is documented below. \"\"\" if config_id is not", "the resource. :param pulumi.ResourceOptions opts: Options for the resource. 
:param", "'gcp:securitycenter/notificationConfig:NotificationConfig', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str],", "service_account(self) -> pulumi.Output[str]: \"\"\" The service account that needs \"pubsub.topics.publish\"", "is documented below. \"\"\" return pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self,", "pulumi.Output[Optional[str]]: \"\"\" The description of the notification config (max of", "required property 'organization'\") __props__.__dict__[\"organization\"] = organization if pubsub_topic is None", "config_id(self) -> pulumi.Input[str]: \"\"\" This must be unique within the", "optional extra properties used to qualify the lookup. :param str", "Cloud Security Command Center the Notification Config lives in. :param", "= \\\"OPEN_FIREWALL\\\" AND state = \\\"ACTIVE\\\"\", )) ``` ## Import", "service account that needs \"pubsub.topics.publish\" permission to publish to the", "TypeError(\"Missing required property 'organization'\") __props__.__dict__[\"organization\"] = organization if pubsub_topic is", "Optional[pulumi.Input[str]] = None, service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] =", "= None, service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None)", "import pulumi import pulumi.runtime from typing import Any, Mapping, Optional,", "Sequence, Union, overload from .. import _utilities from . import", "a Cloud SCC resource that contains the configuration to send", "TypeError('Expected resource options to be a ResourceOptions instance') if opts.version", "@property @pulumi.getter def organization(self) -> pulumi.Input[str]: \"\"\" The organization whose", "name of the resource. 
:param NotificationConfigArgs args: The arguments to", "__init__(__self__, *, config_id: pulumi.Input[str], organization: pulumi.Input[str], pubsub_topic: pulumi.Input[str], streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'],", "config_id) pulumi.set(__self__, \"organization\", organization) pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) pulumi.set(__self__, \"streaming_config\", streaming_config)", "for triggering streaming-based notifications. Structure is documented below. :param pulumi.Input[str]", "existing resource') __props__ = NotificationConfigArgs.__new__(NotificationConfigArgs) if config_id is None and", ":param pulumi.Input[str] id: The unique provider ID of the resource", "str resource_name: The unique name of the resulting resource. :param", "characters). \"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def name(self) ->", "Import NotificationConfig can be imported using any of these accepted", "is not None: pulumi.set(__self__, \"config_id\", config_id) if description is not", "Optional[pulumi.Input[str]]): pulumi.set(self, \"name\", value) @property @pulumi.getter def organization(self) -> Optional[pulumi.Input[str]]:", "notification config (max of 1024 characters). :param pulumi.Input[str] organization: The", "your organization must be enrolled in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center). Without doing", "pubsub_topic: The Pub/Sub topic to send notifications to. Its format", "Cloud Security Command Center (Cloud SCC) notification configs. A notification", "None and not opts.urn: raise TypeError(\"Missing required property 'organization'\") __props__.__dict__[\"organization\"]", "doing so, you may run into errors during resource creation.", "The name of the resource. :param NotificationConfigArgs args: The arguments", "of 1024 characters). 
:param pulumi.Input[str] organization: The organization whose Cloud", "This must be unique within the organization. :param pulumi.Input[str] organization:", "\"\"\" The Pub/Sub topic to send notifications to. Its format", "notifications. Structure is documented below. \"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))", "pulumi.get(self, \"organization\") @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Output[str]: \"\"\" The", "permission to publish to the Pub/Sub topic. :param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config:", "if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be", "errors during resource creation. To get more information about NotificationConfig,", "The name of the resource. :param pulumi.ResourceOptions opts: Options for", "config (max of 1024 characters). :param pulumi.Input[str] organization: The organization", "def service_account(self) -> Optional[pulumi.Input[str]]: \"\"\" The service account that needs", "for triggering streaming-based notifications. Structure is documented below. \"\"\" ...", "to publish to the Pub/Sub topic. \"\"\" return pulumi.get(self, \"service_account\")", "characters). \"\"\" return pulumi.get(self, \"description\") @description.setter def description(self, value: Optional[pulumi.Input[str]]):", "Its format is \"projects/[project_id]/topics/[topic]\". 
\"\"\" return pulumi.get(self, \"pubsub_topic\") @property @pulumi.getter(name=\"serviceAccount\")", "formats ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}} ``` ```sh", "**kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, config_id:", "= None): \"\"\" A Cloud Security Command Center (Cloud SCC)", "not opts.urn: raise TypeError(\"Missing required property 'organization'\") __props__.__dict__[\"organization\"] = organization", "to qualify the lookup. :param str resource_name: The unique name", "gcp.pubsub.Topic(\"sccNotification\") custom_notification_config = gcp.securitycenter.NotificationConfig(\"customNotificationConfig\", config_id=\"my-config\", organization=\"123456789\", description=\"My custom Cloud Security", "the lookup. :param str resource_name: The unique name of the", "resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts:", "this notification config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self,", "topic to send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". :param", "service_account(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"service_account\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self)", "the notification config (max of 1024 characters). :param pulumi.Input[str] organization:", "\"description\", description) if name is not None: pulumi.set(__self__, \"name\", name)", "Command Center the Notification Config lives in. :param pulumi.Input[str] pubsub_topic:", "Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center). Without doing so, you may run into errors during", "streaming-based notifications. Structure is documented below. 
\"\"\" opts = pulumi.ResourceOptions.merge(opts,", "= _utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None:", "return pulumi.get(self, \"config_id\") @config_id.setter def config_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"config_id\",", "more information about NotificationConfig, see: * [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs) * How-to", "@pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Input['NotificationConfigStreamingConfigArgs']: \"\"\" The config for triggering", "resource name of this notification config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'.", "resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. \"\"\"", "= None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None) -> 'NotificationConfig': \"\"\" Get", "\"projects/[project_id]/topics/[topic]\". :param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering streaming-based notifications.", "= name __props__.__dict__[\"organization\"] = organization __props__.__dict__[\"pubsub_topic\"] = pubsub_topic __props__.__dict__[\"service_account\"] =", "description) if name is not None: pulumi.set(__self__, \"name\", name) if", "\"pubsub_topic\", value) @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> Optional[pulumi.Input[str]]: \"\"\" The", "\"\"\" return pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self, value: pulumi.Input['NotificationConfigStreamingConfigArgs']): pulumi.set(self,", "notification config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. 
:param pulumi.Input[str] organization: The", "raise TypeError('__props__ is only valid when passed in combination with", "def streaming_config(self, value: pulumi.Input['NotificationConfigStreamingConfigArgs']): pulumi.set(self, \"streaming_config\", value) @property @pulumi.getter def", "``` ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}} ``` :param", "not opts.urn: raise TypeError(\"Missing required property 'config_id'\") __props__.__dict__[\"config_id\"] = config_id", "pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> Optional[pulumi.Input[str]]: \"\"\"", "run into errors during resource creation. To get more information", "\"\"\" return pulumi.get(self, \"pubsub_topic\") @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> pulumi.Output[str]:", ".. import _utilities from . import outputs from ._inputs import", "of 1024 characters). \"\"\" return pulumi.get(self, \"description\") @description.setter def description(self,", "@description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @property @pulumi.getter", "description is not None: pulumi.set(__self__, \"description\", description) if name is", "value) class NotificationConfig(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]", "str resource_name: The name of the resource. :param pulumi.ResourceOptions opts:", "(Cloud SCC) notification configs. A notification config is a Cloud", "is documented below. :param pulumi.Input[str] description: The description of the", "@property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: \"\"\" The description of", "in. \"\"\" return pulumi.get(self, \"organization\") @organization.setter def organization(self, value: Optional[pulumi.Input[str]]):", "Pub/Sub topic. 
:param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering streaming-based", "format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. :param pulumi.Input[str] organization: The organization whose Cloud Security", "Tool. *** # *** Do not edit by hand unless", "def service_account(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"service_account\", value) @property @pulumi.getter(name=\"streamingConfig\") def", "opts = _utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not", "return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, \"pubsub_topic\",", "None, name: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]] = None, pubsub_topic:", "\"config_id\", value) @property @pulumi.getter def organization(self) -> pulumi.Input[str]: \"\"\" The", "= None, name: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]] = None,", "= streaming_config return NotificationConfig(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"configId\") def config_id(self)", "value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]): pulumi.set(self, \"streaming_config\", value) class NotificationConfig(pulumi.CustomResource): @overload def __init__(__self__,", "\"\"\" return pulumi.get(self, \"service_account\") @service_account.setter def service_account(self, value: Optional[pulumi.Input[str]]): pulumi.set(self,", "organization must be enrolled in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center). Without doing so,", "(max of 1024 characters). 
\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter", "@property @pulumi.getter def organization(self) -> pulumi.Output[str]: \"\"\" The organization whose", "filter=\"category = \\\"OPEN_FIREWALL\\\" AND state = \\\"ACTIVE\\\"\", )) ``` ##", "raise TypeError(\"Missing required property 'organization'\") __props__.__dict__[\"organization\"] = organization if pubsub_topic", "be unique within the organization. :param pulumi.Input[str] organization: The organization", "def __init__(__self__, *, config_id: pulumi.Input[str], organization: pulumi.Input[str], pubsub_topic: pulumi.Input[str], streaming_config:", "pubsub_topic: Optional[pulumi.Input[str]] = None, service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]", "description=\"My custom Cloud Security Command Center Finding Notification Configuration\", pubsub_topic=scc_notification.id,", "value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: \"\"\" The description", "characters). :param pulumi.Input[str] organization: The organization whose Cloud Security Command", "id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]] = None,", "in. \"\"\" return pulumi.get(self, \"organization\") @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) ->", "Config lives in. \"\"\" return pulumi.get(self, \"organization\") @organization.setter def organization(self,", "opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions):", "to send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". 
:param pulumi.Input['NotificationConfigStreamingConfigArgs']", "-> 'NotificationConfig': \"\"\" Get an existing NotificationConfig resource's state with", "Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]:", "notification config is a Cloud SCC resource that contains the", "= gcp.pubsub.Topic(\"sccNotification\") custom_notification_config = gcp.securitycenter.NotificationConfig(\"customNotificationConfig\", config_id=\"my-config\", organization=\"123456789\", description=\"My custom Cloud", "(max of 1024 characters). :param pulumi.Input[str] name: The resource name", "opts.version is None: opts.version = _utilities.get_version() if opts.id is None:", "hand unless you're certain you know what you are doing!", "-> Optional[pulumi.Input[str]]: \"\"\" The service account that needs \"pubsub.topics.publish\" permission", "pulumi.set(self, \"config_id\", value) @property @pulumi.getter def organization(self) -> pulumi.Input[str]: \"\"\"", "def streaming_config(self, value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]): pulumi.set(self, \"streaming_config\", value) class NotificationConfig(pulumi.CustomResource): @overload", "pulumi.Input[str] config_id: This must be unique within the organization. :param", "pulumi.set(__self__, \"description\", description) @property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Input[str]: \"\"\"", ":param str resource_name: The unique name of the resulting resource.", "def pubsub_topic(self) -> pulumi.Input[str]: \"\"\" The Pub/Sub topic to send", "notifications to. Its format is \"projects/[project_id]/topics/[topic]\". 
:param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The", "__props__.__dict__[\"organization\"] = organization __props__.__dict__[\"pubsub_topic\"] = pubsub_topic __props__.__dict__[\"service_account\"] = service_account __props__.__dict__[\"streaming_config\"]", "\"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"pubsub_topic\", value) @property", "\"\"\" return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: Optional[pulumi.Input[str]]): pulumi.set(self,", "config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]]", "a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version()", "@pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> Optional[pulumi.Input[str]]: \"\"\" The Pub/Sub topic to", "publish to the Pub/Sub topic. :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config", "description: Optional[pulumi.Input[str]] = None): \"\"\" The set of arguments for", "= streaming_config __props__.__dict__[\"name\"] = None __props__.__dict__[\"service_account\"] = None super(NotificationConfig, __self__).__init__(", "the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def", "def config_id(self) -> pulumi.Input[str]: \"\"\" This must be unique within", "= None, __props__=None): if opts is None: opts = pulumi.ResourceOptions()", "what you are doing! *** import warnings import pulumi import", "This must be unique within the organization. 
:param pulumi.Input[str] description:", "\"\"\" return pulumi.get(self, \"organization\") @organization.setter def organization(self, value: Optional[pulumi.Input[str]]): pulumi.set(self,", "= organization if pubsub_topic is None and not opts.urn: raise", "return NotificationConfig(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Output[str]:", "organization(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self)", "\"streaming_config\") @streaming_config.setter def streaming_config(self, value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]): pulumi.set(self, \"streaming_config\", value) class", "= None): \"\"\" The set of arguments for constructing a", "```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}} ``` :param str", "config_id __props__.__dict__[\"description\"] = description if organization is None and not", "\"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _NotificationConfigState.__new__(_NotificationConfigState) __props__.__dict__[\"config_id\"] =", "Options for the resource. :param pulumi.Input[str] config_id: This must be", "resources, your organization must be enrolled in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center). 
Without", "= None, streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] = None): \"\"\" Input properties used", "pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}} ``` :param str resource_name: The", "\"service_account\") @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Output['outputs.NotificationConfigStreamingConfig']: \"\"\" The config", "triggering streaming-based notifications. Structure is documented below. :param pulumi.Input[str] description:", "pubsub_topic __props__.__dict__[\"service_account\"] = service_account __props__.__dict__[\"streaming_config\"] = streaming_config return NotificationConfig(resource_name, opts=opts,", "(max of 1024 characters). \"\"\" pulumi.set(__self__, \"config_id\", config_id) pulumi.set(__self__, \"organization\",", "config_id is not None: pulumi.set(__self__, \"config_id\", config_id) if description is", "name) if organization is not None: pulumi.set(__self__, \"organization\", organization) if", ":param pulumi.ResourceOptions opts: Options for the resource. \"\"\" ... def", "-> Optional[pulumi.Input[str]]: \"\"\" The organization whose Cloud Security Command Center", "the Pub/Sub topic. \"\"\" return pulumi.get(self, \"service_account\") @property @pulumi.getter(name=\"streamingConfig\") def", "create/update events of findings, assets and etc. 
> **Note:** In", "value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @pulumi.input_type class _NotificationConfigState: def __init__(__self__,", "NotificationConfig(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None,", "pulumi_gcp as gcp scc_notification = gcp.pubsub.Topic(\"sccNotification\") custom_notification_config = gcp.securitycenter.NotificationConfig(\"customNotificationConfig\", config_id=\"my-config\",", "raise TypeError(\"Missing required property 'config_id'\") __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] =", "resource. :param NotificationConfigArgs args: The arguments to use to populate", "\"\"\" ... @overload def __init__(__self__, resource_name: str, args: NotificationConfigArgs, opts:", "format is \"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self,", "pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, \"pubsub_topic\", value)", "the resource to lookup. :param pulumi.ResourceOptions opts: Options for the", "pubsub_topic if streaming_config is None and not opts.urn: raise TypeError(\"Missing", "required property 'config_id'\") __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description if", "None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): \"\"\" A Cloud Security", "get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]]", "Center the Notification Config lives in. :param pulumi.Input[str] pubsub_topic: The", "config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. 
\"\"\" return pulumi.get(self, \"name\") @property", "streaming_config) @property @pulumi.getter(name=\"configId\") def config_id(self) -> Optional[pulumi.Input[str]]: \"\"\" This must", "to. Its format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config", "unique provider ID of the resource to lookup. :param pulumi.ResourceOptions", "pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] config_id: This", "can be imported using any of these accepted formats ```sh", "*args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__)", "for triggering streaming-based notifications. Structure is documented below. \"\"\" opts", "streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'], description: Optional[pulumi.Input[str]] = None): \"\"\" The set of", "if pubsub_topic is not None: pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) if service_account", "name(self) -> Optional[pulumi.Input[str]]: \"\"\" The resource name of this notification", "description is not None: pulumi.set(__self__, \"description\", description) @property @pulumi.getter(name=\"configId\") def", "the resource. :param pulumi.Input[str] config_id: This must be unique within", "of 1024 characters). :param pulumi.Input[str] name: The resource name of", "Optional[pulumi.Input[str]]: \"\"\" The organization whose Cloud Security Command Center the", "def __init__(__self__, resource_name: str, args: NotificationConfigArgs, opts: Optional[pulumi.ResourceOptions] = None):", "__props__.__dict__[\"name\"] = None __props__.__dict__[\"service_account\"] = None super(NotificationConfig, __self__).__init__( 'gcp:securitycenter/notificationConfig:NotificationConfig', resource_name,", "pulumi.Output[str]: \"\"\" This must be unique within the organization. \"\"\"", "format is \"projects/[project_id]/topics/[topic]\". 
:param pulumi.Input[str] service_account: The service account that", "format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering", "Guides * [Official Documentation](https://cloud.google.com/security-command-center/docs) ## Example Usage ### Scc Notification", "pulumi.Input['NotificationConfigStreamingConfigArgs'], description: Optional[pulumi.Input[str]] = None): \"\"\" The set of arguments", "not None: pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) if service_account is not None:", "streaming_config is not None: pulumi.set(__self__, \"streaming_config\", streaming_config) @property @pulumi.getter(name=\"configId\") def", "return pulumi.get(self, \"description\") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\",", "description: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]]", "organization(self, value: pulumi.Input[str]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self)", "\"streaming_config\", streaming_config) @property @pulumi.getter(name=\"configId\") def config_id(self) -> Optional[pulumi.Input[str]]: \"\"\" This", "\"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, \"pubsub_topic\", value) @property", "not None: pulumi.set(__self__, \"description\", description) if name is not None:", "the notification config (max of 1024 characters). \"\"\" return pulumi.get(self,", "streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] = None): \"\"\" Input properties used for looking", "and not opts.urn: raise TypeError(\"Missing required property 'organization'\") __props__.__dict__[\"organization\"] =", "topic. 
:param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering streaming-based notifications.", "class NotificationConfig(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] =", "value: pulumi.Input[str]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) ->", "return pulumi.get(self, \"organization\") @organization.setter def organization(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"organization\",", "*args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None,", "@overload def __init__(__self__, resource_name: str, args: NotificationConfigArgs, opts: Optional[pulumi.ResourceOptions] =", "is a Cloud SCC resource that contains the configuration to", "is None and not opts.urn: raise TypeError(\"Missing required property 'streaming_config'\")", "def config_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter def", "is \"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self, \"pubsub_topic\") @property @pulumi.getter(name=\"serviceAccount\") def service_account(self)", "None, description: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, organization:", "Pub/Sub topic. \"\"\" return pulumi.get(self, \"service_account\") @service_account.setter def service_account(self, value:", "notification config (max of 1024 characters). :param pulumi.Input[str] name: The", "\"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Input[str]: \"\"\" The", "properties. :param pulumi.ResourceOptions opts: Options for the resource. 
\"\"\" ...", "opts.version = _utilities.get_version() if opts.id is None: if __props__ is", "\"\"\" return pulumi.get(self, \"config_id\") @config_id.setter def config_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self,", "Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): if opts is None: opts =", "None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__,", "of the resource. :param NotificationConfigArgs args: The arguments to use", "required property 'streaming_config'\") __props__.__dict__[\"streaming_config\"] = streaming_config __props__.__dict__[\"name\"] = None __props__.__dict__[\"service_account\"]", "description: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]]", "pulumi.Input[str] organization: The organization whose Cloud Security Command Center the", "lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str]", ":param pulumi.Input[str] service_account: The service account that needs \"pubsub.topics.publish\" permission", "@pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: \"\"\" The description of the", "Optional[pulumi.Input[str]]: \"\"\" The resource name of this notification config, in", "pubsub_topic: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): \"\"\"", "the organization. \"\"\" return pulumi.get(self, \"config_id\") @config_id.setter def config_id(self, value:", "notifications. Structure is documented below. \"\"\" return pulumi.get(self, \"streaming_config\") @streaming_config.setter", "for looking up and filtering NotificationConfig resources. :param pulumi.Input[str] config_id:", "... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts =", "pubsub_topic=scc_notification.id, streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs( filter=\"category = \\\"OPEN_FIREWALL\\\" AND state = \\\"ACTIVE\\\"\", ))", "unique within the organization. \"\"\" return pulumi.get(self, \"config_id\") @property @pulumi.getter", "= NotificationConfigArgs.__new__(NotificationConfigArgs) if config_id is None and not opts.urn: raise", "and not opts.urn: raise TypeError(\"Missing required property 'streaming_config'\") __props__.__dict__[\"streaming_config\"] =", "organization: Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]] = None, service_account: Optional[pulumi.Input[str]]", "Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]] =", "streaming_config) if description is not None: pulumi.set(__self__, \"description\", description) @property", "the organization. :param pulumi.Input[str] organization: The organization whose Cloud Security", "NotificationConfig resource's state with the given name, id, and optional", "know what you are doing! *** import warnings import pulumi", "__init__(__self__, *, config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None,", "Any, Mapping, Optional, Sequence, Union, overload from .. 
import _utilities", "@pulumi.getter def organization(self) -> Optional[pulumi.Input[str]]: \"\"\" The organization whose Cloud", "Optional[pulumi.Input[str]]: \"\"\" The service account that needs \"pubsub.topics.publish\" permission to", "pulumi.get(self, \"organization\") @organization.setter def organization(self, value: pulumi.Input[str]): pulumi.set(self, \"organization\", value)", "import gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}} ``` :param str resource_name: The name", "config_id(self, value: pulumi.Input[str]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter def organization(self)", "Terraform Bridge (tfgen) Tool. *** # *** Do not edit", "notifications to. Its format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The", "publish to the Pub/Sub topic. \"\"\" return pulumi.get(self, \"service_account\") @service_account.setter", "= None super(NotificationConfig, __self__).__init__( 'gcp:securitycenter/notificationConfig:NotificationConfig', resource_name, __props__, opts) @staticmethod def", "service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] = None): \"\"\" Input", "description of the notification config (max of 1024 characters). \"\"\"", "pulumi.Input[str]: \"\"\" The Pub/Sub topic to send notifications to. Its", "Security Command Center the Notification Config lives in. \"\"\" return", "notification config (max of 1024 characters). \"\"\" pulumi.set(__self__, \"config_id\", config_id)", "is documented below. \"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ =", "config (max of 1024 characters). :param pulumi.Input[str] name: The resource", "for the resource. \"\"\" ... 
def __init__(__self__, resource_name: str, *args,", "@pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: \"\"\" The resource name of", "Pub/Sub topic. :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering streaming-based", "resource_args, opts = _utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is", "by hand unless you're certain you know what you are", "resource. :param pulumi.Input[str] id: The unique provider ID of the", "\"streaming_config\") @streaming_config.setter def streaming_config(self, value: pulumi.Input['NotificationConfigStreamingConfigArgs']): pulumi.set(self, \"streaming_config\", value) @property", "organization) if pubsub_topic is not None: pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) if", "format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering", "* [Official Documentation](https://cloud.google.com/security-command-center/docs) ## Example Usage ### Scc Notification Config", "pulumi.Input[str] name: The resource name of this notification config, in", "\"pubsub_topic\") @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> pulumi.Output[str]: \"\"\" The service", "below. 
\"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _NotificationConfigState.__new__(_NotificationConfigState) __props__.__dict__[\"config_id\"]", "@property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Input['NotificationConfigStreamingConfigArgs']: \"\"\" The config for", "\"description\", description) @property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Input[str]: \"\"\" This", "@service_account.setter def service_account(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"service_account\", value) @property @pulumi.getter(name=\"streamingConfig\")", "gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}} ``` ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default", "outputs from ._inputs import * __all__ = ['NotificationConfigArgs', 'NotificationConfig'] @pulumi.input_type", "@pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"streamingConfig\")", "organization: The organization whose Cloud Security Command Center the Notification", "_NotificationConfigState: def __init__(__self__, *, config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]]", "documented below. :param pulumi.Input[str] description: The description of the notification", "in combination with a valid opts.id to get an existing", "= None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): if opts is", "args: NotificationConfigArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\" A Cloud Security", "is documented below. 
\"\"\" if config_id is not None: pulumi.set(__self__,", "value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @property @pulumi.getter def name(self) ->", "the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self, \"name\") @name.setter def name(self,", "raise TypeError(\"Missing required property 'pubsub_topic'\") __props__.__dict__[\"pubsub_topic\"] = pubsub_topic if streaming_config", "not opts.urn: raise TypeError(\"Missing required property 'pubsub_topic'\") __props__.__dict__[\"pubsub_topic\"] = pubsub_topic", "for constructing a NotificationConfig resource. :param pulumi.Input[str] config_id: This must", "documented below. \"\"\" ... @overload def __init__(__self__, resource_name: str, args:", "$ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}} ``` :param str resource_name:", "Optional[pulumi.Input[str]]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> Optional[pulumi.Input[str]]:", "of the resource to lookup. :param pulumi.ResourceOptions opts: Options for", "from ._inputs import * __all__ = ['NotificationConfigArgs', 'NotificationConfig'] @pulumi.input_type class", "resource. :param pulumi.Input[str] config_id: This must be unique within the", "= None, __props__=None): \"\"\" A Cloud Security Command Center (Cloud", "Config Basic ```python import pulumi import pulumi_gcp as gcp scc_notification", "@property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> pulumi.Output[str]: \"\"\" The service account", "\"\"\" return pulumi.get(self, \"name\") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self,", "within the organization. 
\"\"\" return pulumi.get(self, \"config_id\") @property @pulumi.getter def", ":param pulumi.Input[str] organization: The organization whose Cloud Security Command Center", ":param pulumi.Input[str] description: The description of the notification config (max", "__self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] =", "organization) pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) pulumi.set(__self__, \"streaming_config\", streaming_config) if description is", "\"pubsub_topic\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> pulumi.Input['NotificationConfigStreamingConfigArgs']: \"\"\" The", "\"description\") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @property", "None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected", "not None: pulumi.set(__self__, \"streaming_config\", streaming_config) @property @pulumi.getter(name=\"configId\") def config_id(self) ->", "@property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Input[str]: \"\"\" This must be", "None and not opts.urn: raise TypeError(\"Missing required property 'streaming_config'\") __props__.__dict__[\"streaming_config\"]", "notifications. Structure is documented below. \"\"\" ... @overload def __init__(__self__,", "@property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) -> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]: \"\"\" The config for", "format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self, \"name\") @name.setter def name(self, value:", "= organization __props__.__dict__[\"pubsub_topic\"] = pubsub_topic __props__.__dict__[\"service_account\"] = service_account __props__.__dict__[\"streaming_config\"] =", "are doing! 
*** import warnings import pulumi import pulumi.runtime from", "@pubsub_topic.setter def pubsub_topic(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"serviceAccount\")", "Optional[pulumi.Input[str]] = None, service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] =", "__init__(__self__, resource_name: str, args: NotificationConfigArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\"", "def name(self) -> Optional[pulumi.Input[str]]: \"\"\" The resource name of this", "ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options", "pubsub_topic is not None: pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) if service_account is", "* [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs) * How-to Guides * [Official Documentation](https://cloud.google.com/security-command-center/docs) ##", "None): \"\"\" A Cloud Security Command Center (Cloud SCC) notification", "pulumi.get(self, \"name\") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"name\", value)", "def service_account(self) -> pulumi.Output[str]: \"\"\" The service account that needs", "pulumi.get(self, \"description\") @property @pulumi.getter def name(self) -> pulumi.Output[str]: \"\"\" The", "= None): \"\"\" Input properties used for looking up and", "class _NotificationConfigState: def __init__(__self__, *, config_id: Optional[pulumi.Input[str]] = None, description:", "if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name,", "account that needs \"pubsub.topics.publish\" permission to publish to the Pub/Sub", "Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] = None): \"\"\" Input properties used for looking up", 
"pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering streaming-based notifications. Structure is", "__props__.__dict__[\"pubsub_topic\"] = pubsub_topic __props__.__dict__[\"service_account\"] = service_account __props__.__dict__[\"streaming_config\"] = streaming_config return", "str, args: NotificationConfigArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\" A Cloud", "use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options", "Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] =", "Scc Notification Config Basic ```python import pulumi import pulumi_gcp as", "triggering streaming-based notifications. Structure is documented below. \"\"\" opts =", "is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise", "# *** Do not edit by hand unless you're certain", "organization whose Cloud Security Command Center the Notification Config lives", "NotificationConfig, see: * [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs) * How-to Guides * [Official", "\"\"\" The config for triggering streaming-based notifications. Structure is documented", "config is a Cloud SCC resource that contains the configuration", "Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]): pulumi.set(self, \"streaming_config\", value) class NotificationConfig(pulumi.CustomResource): @overload def __init__(__self__, resource_name:", "into errors during resource creation. To get more information about", "pulumi.Output[str]: \"\"\" The Pub/Sub topic to send notifications to. 
Its", "description: The description of the notification config (max of 1024", "= pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _NotificationConfigState.__new__(_NotificationConfigState) __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"]", "= description if organization is None and not opts.urn: raise", "of the resulting resource. :param pulumi.Input[str] id: The unique provider", "return pulumi.get(self, \"name\") @property @pulumi.getter def organization(self) -> pulumi.Output[str]: \"\"\"", "\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def name(self) -> pulumi.Output[str]:", "required property 'pubsub_topic'\") __props__.__dict__[\"pubsub_topic\"] = pubsub_topic if streaming_config is None", "__props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description __props__.__dict__[\"name\"] = name __props__.__dict__[\"organization\"]", "@property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: \"\"\" The description of", "resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions, *args,", "*, config_id: Optional[pulumi.Input[str]] = None, description: Optional[pulumi.Input[str]] = None, name:", "be enrolled in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center). Without doing so, you may", "config (max of 1024 characters). \"\"\" pulumi.set(__self__, \"config_id\", config_id) pulumi.set(__self__,", ":param NotificationConfigArgs args: The arguments to use to populate this", "class NotificationConfigArgs: def __init__(__self__, *, config_id: pulumi.Input[str], organization: pulumi.Input[str], pubsub_topic:", "needs \"pubsub.topics.publish\" permission to publish to the Pub/Sub topic. 
:param", "pulumi.get(self, \"config_id\") @config_id.setter def config_id(self, value: pulumi.Input[str]): pulumi.set(self, \"config_id\", value)", "@pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Output[str]: \"\"\" The Pub/Sub topic to", "__props__.__dict__[\"streaming_config\"] = streaming_config __props__.__dict__[\"name\"] = None __props__.__dict__[\"service_account\"] = None super(NotificationConfig,", "None, description: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]] = None, pubsub_topic:", "Optional, Sequence, Union, overload from .. import _utilities from .", "Security Command Center (Cloud SCC) notification configs. A notification config", "if streaming_config is None and not opts.urn: raise TypeError(\"Missing required", "None, __props__=None): \"\"\" A Cloud Security Command Center (Cloud SCC)", "the Notification Config lives in. \"\"\" return pulumi.get(self, \"organization\") @property", "a valid opts.id to get an existing resource') __props__ =", "opts=opts, __props__=__props__) @property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Output[str]: \"\"\" This", "not None: pulumi.set(__self__, \"description\", description) @property @pulumi.getter(name=\"configId\") def config_id(self) ->", "@property @pulumi.getter def organization(self) -> Optional[pulumi.Input[str]]: \"\"\" The organization whose", "@pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Input[str]: \"\"\" The Pub/Sub topic to", "pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts,", "_NotificationConfigState.__new__(_NotificationConfigState) __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description __props__.__dict__[\"name\"] = name", "= pubsub_topic __props__.__dict__[\"service_account\"] = service_account __props__.__dict__[\"streaming_config\"] = streaming_config return 
NotificationConfig(resource_name,", "notifications to. Its format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[str] service_account: The", "@pulumi.getter(name=\"serviceAccount\") def service_account(self) -> pulumi.Output[str]: \"\"\" The service account that", "pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> Optional[pulumi.Input[str]]: \"\"\"", "1024 characters). \"\"\" pulumi.set(__self__, \"config_id\", config_id) pulumi.set(__self__, \"organization\", organization) pulumi.set(__self__,", "imported using any of these accepted formats ```sh $ pulumi", "publish to the Pub/Sub topic. \"\"\" return pulumi.get(self, \"service_account\") @property", "pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"pubsub_topic\", value)", "in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center). Without doing so, you may run into", "pulumi import pulumi_gcp as gcp scc_notification = gcp.pubsub.Topic(\"sccNotification\") custom_notification_config =", "ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if", "SCC) notification configs. 
A notification config is a Cloud SCC", "pubsub_topic(self) -> Optional[pulumi.Input[str]]: \"\"\" The Pub/Sub topic to send notifications", "when passed in combination with a valid opts.id to get", "pulumi.ResourceOptions(id=id)) __props__ = _NotificationConfigState.__new__(_NotificationConfigState) __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description", "Cloud SCC resource that contains the configuration to send notifications", "using any of these accepted formats ```sh $ pulumi import", "pulumi.set(self, \"streaming_config\", value) class NotificationConfig(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str,", "= None, description: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None,", "get an existing resource') __props__ = NotificationConfigArgs.__new__(NotificationConfigArgs) if config_id is", "\"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> Optional[pulumi.Input[str]]: \"\"\" The", "= None, description: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]] = None,", "'organizations/{{organization}}/notificationConfigs/{{config_id}}'. 
\"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def organization(self) ->", "pulumi.set(__self__, \"streaming_config\", streaming_config) if description is not None: pulumi.set(__self__, \"description\",", "@property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Input[str]: \"\"\" The Pub/Sub topic", "-> Optional[pulumi.Input[str]]: \"\"\" The description of the notification config (max", "__props__ = _NotificationConfigState.__new__(_NotificationConfigState) __props__.__dict__[\"config_id\"] = config_id __props__.__dict__[\"description\"] = description __props__.__dict__[\"name\"]", "accepted formats ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}} ```", "[Official Documentation](https://cloud.google.com/security-command-center/docs) ## Example Usage ### Scc Notification Config Basic", "= None, pubsub_topic: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None,", "\"pubsub_topic\", pubsub_topic) if service_account is not None: pulumi.set(__self__, \"service_account\", service_account)", "pulumi.Input[str]: \"\"\" This must be unique within the organization. \"\"\"", "\"\"\" ... def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts", "None and not opts.urn: raise TypeError(\"Missing required property 'config_id'\") __props__.__dict__[\"config_id\"]", "resource's state with the given name, id, and optional extra", "return pulumi.get(self, \"service_account\") @service_account.setter def service_account(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"service_account\",", "The unique provider ID of the resource to lookup. 
:param", "def organization(self) -> Optional[pulumi.Input[str]]: \"\"\" The organization whose Cloud Security", "None, organization: Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]] = None, service_account:", "= None, organization: Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]] = None,", "streaming-based notifications. Structure is documented below. :param pulumi.Input[str] description: The", "of these accepted formats ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default", "description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @pulumi.input_type class _NotificationConfigState: def", "'organization'\") __props__.__dict__[\"organization\"] = organization if pubsub_topic is None and not", "Structure is documented below. \"\"\" if config_id is not None:", "property 'pubsub_topic'\") __props__.__dict__[\"pubsub_topic\"] = pubsub_topic if streaming_config is None and", "Optional[pulumi.Input[str]] = None): \"\"\" The set of arguments for constructing", "configs. A notification config is a Cloud SCC resource that", "def config_id(self) -> pulumi.Output[str]: \"\"\" This must be unique within", "be unique within the organization. \"\"\" return pulumi.get(self, \"config_id\") @config_id.setter", "Its format is \"projects/[project_id]/topics/[topic]\". 
:param pulumi.Input[str] service_account: The service account", "In order to use Cloud SCC resources, your organization must", "gcp.securitycenter.NotificationConfig(\"customNotificationConfig\", config_id=\"my-config\", organization=\"123456789\", description=\"My custom Cloud Security Command Center Finding", "return pulumi.get(self, \"organization\") @organization.setter def organization(self, value: pulumi.Input[str]): pulumi.set(self, \"organization\",", "pubsub_topic is None and not opts.urn: raise TypeError(\"Missing required property", "def organization(self) -> pulumi.Input[str]: \"\"\" The organization whose Cloud Security", "id, and optional extra properties used to qualify the lookup.", "pulumi.Input[str]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter def organization(self) -> pulumi.Input[str]:", "@pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Output[str]: \"\"\" This must be unique", "name of this notification config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\"", "\"\"\" return pulumi.get(self, \"config_id\") @property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]:", "organization(self) -> pulumi.Output[str]: \"\"\" The organization whose Cloud Security Command", "SCC resource that contains the configuration to send notifications for", "unless you're certain you know what you are doing! ***", "properties used for looking up and filtering NotificationConfig resources. 
:param", "pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to", "custom_notification_config = gcp.securitycenter.NotificationConfig(\"customNotificationConfig\", config_id=\"my-config\", organization=\"123456789\", description=\"My custom Cloud Security Command", "pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) pulumi.set(__self__, \"streaming_config\", streaming_config) if description is not", "pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self)", "organizations/{{organization}}/notificationConfigs/{{name}} ``` ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}} ```", "in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter", "value: pulumi.Input['NotificationConfigStreamingConfigArgs']): pulumi.set(self, \"streaming_config\", value) @property @pulumi.getter def description(self) ->", "\\\"ACTIVE\\\"\", )) ``` ## Import NotificationConfig can be imported using", "import pulumi import pulumi_gcp as gcp scc_notification = gcp.pubsub.Topic(\"sccNotification\") custom_notification_config", "pulumi.Output[str]: \"\"\" The service account that needs \"pubsub.topics.publish\" permission to", "to publish to the Pub/Sub topic. :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The", "is \"projects/[project_id]/topics/[topic]\". 
\"\"\" return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value:", "None, service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None) ->", "streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs( filter=\"category = \\\"OPEN_FIREWALL\\\" AND state = \\\"ACTIVE\\\"\", )) ```", "value: Optional[pulumi.Input[str]]): pulumi.set(self, \"service_account\", value) @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) ->", "None __props__.__dict__[\"service_account\"] = None super(NotificationConfig, __self__).__init__( 'gcp:securitycenter/notificationConfig:NotificationConfig', resource_name, __props__, opts)", "if opts.id is None: if __props__ is not None: raise", "Configuration\", pubsub_topic=scc_notification.id, streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs( filter=\"category = \\\"OPEN_FIREWALL\\\" AND state = \\\"ACTIVE\\\"\",", "pulumi.Input['NotificationConfigStreamingConfigArgs']): pulumi.set(self, \"streaming_config\", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]:", "## Example Usage ### Scc Notification Config Basic ```python import", "__props__.__dict__[\"description\"] = description if organization is None and not opts.urn:", "Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None) -> 'NotificationConfig': \"\"\" Get an existing NotificationConfig", "import * __all__ = ['NotificationConfigArgs', 'NotificationConfig'] @pulumi.input_type class NotificationConfigArgs: def", "the notification config (max of 1024 characters). \"\"\" pulumi.set(__self__, \"config_id\",", "None) -> 'NotificationConfig': \"\"\" Get an existing NotificationConfig resource's state", "name of this notification config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. 
:param", "pulumi.Input[str] pubsub_topic: The Pub/Sub topic to send notifications to. Its", "custom Cloud Security Command Center Finding Notification Configuration\", pubsub_topic=scc_notification.id, streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs(", "The config for triggering streaming-based notifications. Structure is documented below.", "resource_name: str, args: NotificationConfigArgs, opts: Optional[pulumi.ResourceOptions] = None): \"\"\" A", "Command Center Finding Notification Configuration\", pubsub_topic=scc_notification.id, streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs( filter=\"category = \\\"OPEN_FIREWALL\\\"", "'pubsub_topic'\") __props__.__dict__[\"pubsub_topic\"] = pubsub_topic if streaming_config is None and not", "value: Optional[pulumi.Input[str]]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) ->", "unique within the organization. :param pulumi.Input[str] description: The description of", "\"config_id\") @property @pulumi.getter def description(self) -> pulumi.Output[Optional[str]]: \"\"\" The description", "and not opts.urn: raise TypeError(\"Missing required property 'config_id'\") __props__.__dict__[\"config_id\"] =", "def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @property @pulumi.getter def", "send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". 
:param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config:", "def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]]", "# coding=utf-8 # *** WARNING: this file was generated by", "import warnings import pulumi import pulumi.runtime from typing import Any,", "streaming_config(self, value: pulumi.Input['NotificationConfigStreamingConfigArgs']): pulumi.set(self, \"streaming_config\", value) @property @pulumi.getter def description(self)", ":param str resource_name: The name of the resource. :param NotificationConfigArgs", "-> Optional[pulumi.Input[str]]: \"\"\" The Pub/Sub topic to send notifications to.", "config_id(self) -> pulumi.Output[str]: \"\"\" This must be unique within the", "None, service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] = None): \"\"\"", "pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering streaming-based notifications. Structure is", "def config_id(self) -> Optional[pulumi.Input[str]]: \"\"\" This must be unique within", "is not None: pulumi.set(__self__, \"organization\", organization) if pubsub_topic is not", "None: pulumi.set(__self__, \"organization\", organization) if pubsub_topic is not None: pulumi.set(__self__,", "\"config_id\") @config_id.setter def config_id(self, value: pulumi.Input[str]): pulumi.set(self, \"config_id\", value) @property", "to send notifications to. Its format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]", "provider ID of the resource to lookup. :param pulumi.ResourceOptions opts:", "@pulumi.getter def name(self) -> pulumi.Output[str]: \"\"\" The resource name of", "looking up and filtering NotificationConfig resources. 
:param pulumi.Input[str] config_id: This", "service_account: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None) -> 'NotificationConfig':", "Command Center the Notification Config lives in. \"\"\" return pulumi.get(self,", "gcp scc_notification = gcp.pubsub.Topic(\"sccNotification\") custom_notification_config = gcp.securitycenter.NotificationConfig(\"customNotificationConfig\", config_id=\"my-config\", organization=\"123456789\", description=\"My", "config_id: This must be unique within the organization. :param pulumi.Input[str]", "Center Finding Notification Configuration\", pubsub_topic=scc_notification.id, streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs( filter=\"category = \\\"OPEN_FIREWALL\\\" AND", "pulumi.get(self, \"description\") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value)", "None and not opts.urn: raise TypeError(\"Missing required property 'pubsub_topic'\") __props__.__dict__[\"pubsub_topic\"]", "_internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]] =", "pubsub_topic: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None): if", "{{organization}}/{{name}} ``` :param str resource_name: The name of the resource.", "is not None: raise TypeError('__props__ is only valid when passed", "pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance')", "= None) -> 'NotificationConfig': \"\"\" Get an existing NotificationConfig resource's", "Example Usage ### Scc Notification Config Basic ```python import pulumi", "pulumi.Input[str]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> 
pulumi.Input[str]:", "resource') __props__ = NotificationConfigArgs.__new__(NotificationConfigArgs) if config_id is None and not", "information about NotificationConfig, see: * [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs) * How-to Guides", "*** # *** Do not edit by hand unless you're", "None: pulumi.set(__self__, \"description\", description) @property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Input[str]:", "description of the notification config (max of 1024 characters). :param", "\"\"\" A Cloud Security Command Center (Cloud SCC) notification configs.", "AND state = \\\"ACTIVE\\\"\", )) ``` ## Import NotificationConfig can", "streaming_config(self) -> pulumi.Input['NotificationConfigStreamingConfigArgs']: \"\"\" The config for triggering streaming-based notifications.", "if config_id is None and not opts.urn: raise TypeError(\"Missing required", "value: Optional[pulumi.Input[str]]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter def description(self) ->", "value) @property @pulumi.getter def organization(self) -> Optional[pulumi.Input[str]]: \"\"\" The organization", "service_account __props__.__dict__[\"streaming_config\"] = streaming_config return NotificationConfig(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"configId\")", "value) @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> Optional[pulumi.Input[str]]: \"\"\" The Pub/Sub", "name: Optional[pulumi.Input[str]] = None, organization: Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]]", "The Pub/Sub topic to send notifications to. 
Its format is", "\"\"\" The resource name of this notification config, in the", "with a valid opts.id to get an existing resource') __props__", "The description of the notification config (max of 1024 characters).", "if organization is not None: pulumi.set(__self__, \"organization\", organization) if pubsub_topic", "else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]", "Config lives in. :param pulumi.Input[str] pubsub_topic: The Pub/Sub topic to", "of this notification config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return", "def streaming_config(self) -> pulumi.Output['outputs.NotificationConfigStreamingConfig']: \"\"\" The config for triggering streaming-based", "__props__ = NotificationConfigArgs.__new__(NotificationConfigArgs) if config_id is None and not opts.urn:", "within the organization. :param pulumi.Input[str] organization: The organization whose Cloud", "the organization. \"\"\" return pulumi.get(self, \"config_id\") @property @pulumi.getter def description(self)", "notification config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self, \"name\")", "Structure is documented below. \"\"\" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__", "state = \\\"ACTIVE\\\"\", )) ``` ## Import NotificationConfig can be", "organization(self) -> Optional[pulumi.Input[str]]: \"\"\" The organization whose Cloud Security Command", "notifications. Structure is documented below. :param pulumi.Input[str] description: The description", "may run into errors during resource creation. 
To get more", "config_id is None and not opts.urn: raise TypeError(\"Missing required property", "organization: pulumi.Input[str], pubsub_topic: pulumi.Input[str], streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'], description: Optional[pulumi.Input[str]] = None):", "as gcp scc_notification = gcp.pubsub.Topic(\"sccNotification\") custom_notification_config = gcp.securitycenter.NotificationConfig(\"customNotificationConfig\", config_id=\"my-config\", organization=\"123456789\",", "None: pulumi.set(__self__, \"service_account\", service_account) if streaming_config is not None: pulumi.set(__self__,", "default organizations/{{organization}}/notificationConfigs/{{name}} ``` ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}}", "this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource.", "pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload", ":param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering streaming-based notifications. Structure", "config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self, \"name\") @name.setter", "a NotificationConfig resource. :param pulumi.Input[str] config_id: This must be unique", ":param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] config_id:", "__props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions]", ":param pulumi.Input[str] name: The resource name of this notification config,", "opts: Options for the resource. :param pulumi.Input[str] config_id: This must", "-> pulumi.Output['outputs.NotificationConfigStreamingConfig']: \"\"\" The config for triggering streaming-based notifications. Structure", "Union, overload from .. import _utilities from . 
import outputs", "\"\"\" return pulumi.get(self, \"organization\") @organization.setter def organization(self, value: pulumi.Input[str]): pulumi.set(self,", "description(self) -> pulumi.Output[Optional[str]]: \"\"\" The description of the notification config", "Notification Config lives in. \"\"\" return pulumi.get(self, \"organization\") @property @pulumi.getter(name=\"pubsubTopic\")", "Without doing so, you may run into errors during resource", "from typing import Any, Mapping, Optional, Sequence, Union, overload from", "must be unique within the organization. \"\"\" return pulumi.get(self, \"config_id\")", "pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]] = None, description:", "def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @pulumi.input_type class _NotificationConfigState:", "opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource", "certain you know what you are doing! *** import warnings", "streaming-based notifications. Structure is documented below. \"\"\" if config_id is", "Optional[pulumi.Input[str]]): pulumi.set(self, \"pubsub_topic\", value) @property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> Optional[pulumi.Input[str]]:", "name __props__.__dict__[\"organization\"] = organization __props__.__dict__[\"pubsub_topic\"] = pubsub_topic __props__.__dict__[\"service_account\"] = service_account", "1024 characters). :param pulumi.Input[str] organization: The organization whose Cloud Security", "if service_account is not None: pulumi.set(__self__, \"service_account\", service_account) if streaming_config", "is \"projects/[project_id]/topics/[topic]\". 
:param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering streaming-based", "of this notification config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. :param pulumi.Input[str]", "\"streaming_config\", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: \"\"\" The", "\"description\") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @pulumi.input_type", "pulumi.Input[str], pubsub_topic: pulumi.Input[str], streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'], description: Optional[pulumi.Input[str]] = None): \"\"\"", "def organization(self, value: pulumi.Input[str]): pulumi.set(self, \"organization\", value) @property @pulumi.getter(name=\"pubsubTopic\") def", "the resource. \"\"\" ... def __init__(__self__, resource_name: str, *args, **kwargs):", "\"\"\" return pulumi.get(self, \"description\") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self,", "during resource creation. To get more information about NotificationConfig, see:", "organization if pubsub_topic is None and not opts.urn: raise TypeError(\"Missing", "# *** WARNING: this file was generated by the Pulumi", "TypeError('__props__ is only valid when passed in combination with a", "*, config_id: pulumi.Input[str], organization: pulumi.Input[str], pubsub_topic: pulumi.Input[str], streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'], description:", "Pub/Sub topic. 
\"\"\" return pulumi.get(self, \"service_account\") @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self)", "\"config_id\") @config_id.setter def config_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"config_id\", value) @property", "None super(NotificationConfig, __self__).__init__( 'gcp:securitycenter/notificationConfig:NotificationConfig', resource_name, __props__, opts) @staticmethod def get(resource_name:", "return pulumi.get(self, \"organization\") @property @pulumi.getter(name=\"pubsubTopic\") def pubsub_topic(self) -> pulumi.Output[str]: \"\"\"", "below. :param pulumi.Input[str] description: The description of the notification config", "not None: raise TypeError('__props__ is only valid when passed in", "Its format is \"projects/[project_id]/topics/[topic]\". :param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for", "<filename>sdk/python/pulumi_gcp/securitycenter/notification_config.py # coding=utf-8 # *** WARNING: this file was generated", "opts.urn: raise TypeError(\"Missing required property 'pubsub_topic'\") __props__.__dict__[\"pubsub_topic\"] = pubsub_topic if", "= None, pubsub_topic: Optional[pulumi.Input[str]] = None, service_account: Optional[pulumi.Input[str]] = None,", "config, in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. :param pulumi.Input[str] organization: The organization", "Command Center (Cloud SCC) notification configs. A notification config is", "The arguments to use to populate this resource's properties. :param", "NotificationConfig resource. :param pulumi.Input[str] config_id: This must be unique within", "of the notification config (max of 1024 characters). 
\"\"\" pulumi.set(__self__,", "any of these accepted formats ```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig", "pulumi.Input[str] id: The unique provider ID of the resource to", "None, organization: Optional[pulumi.Input[str]] = None, pubsub_topic: Optional[pulumi.Input[str]] = None, streaming_config:", "-> pulumi.Output[str]: \"\"\" The organization whose Cloud Security Command Center", "The set of arguments for constructing a NotificationConfig resource. :param", "Bridge (tfgen) Tool. *** # *** Do not edit by", "None, pubsub_topic: Optional[pulumi.Input[str]] = None, streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None, __props__=None):", "be imported using any of these accepted formats ```sh $", "whose Cloud Security Command Center the Notification Config lives in.", "-> pulumi.Input[str]: \"\"\" The Pub/Sub topic to send notifications to.", "topic. \"\"\" return pulumi.get(self, \"service_account\") @service_account.setter def service_account(self, value: Optional[pulumi.Input[str]]):", "-> pulumi.Output[str]: \"\"\" This must be unique within the organization.", "*** WARNING: this file was generated by the Pulumi Terraform", "the organization. :param pulumi.Input[str] description: The description of the notification", "```sh $ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}} ``` ```sh $", "Structure is documented below. \"\"\" ... @overload def __init__(__self__, resource_name:", "order to use Cloud SCC resources, your organization must be", "if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts,", "\"streaming_config\", streaming_config) if description is not None: pulumi.set(__self__, \"description\", description)", "the Pub/Sub topic. 
\"\"\" return pulumi.get(self, \"service_account\") @service_account.setter def service_account(self,", "be unique within the organization. :param pulumi.Input[str] description: The description", "unique name of the resulting resource. :param pulumi.Input[str] id: The", "= \\\"ACTIVE\\\"\", )) ``` ## Import NotificationConfig can be imported", "= None __props__.__dict__[\"service_account\"] = None super(NotificationConfig, __self__).__init__( 'gcp:securitycenter/notificationConfig:NotificationConfig', resource_name, __props__,", "for triggering streaming-based notifications. Structure is documented below. \"\"\" return", "NotificationConfigArgs args: The arguments to use to populate this resource's", "of arguments for constructing a NotificationConfig resource. :param pulumi.Input[str] config_id:", "Its format is \"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def", "in the format 'organizations/{{organization}}/notificationConfigs/{{config_id}}'. \"\"\" return pulumi.get(self, \"name\") @name.setter def", "@config_id.setter def config_id(self, value: pulumi.Input[str]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter", "format is \"projects/[project_id]/topics/[topic]\". \"\"\" return pulumi.get(self, \"pubsub_topic\") @property @pulumi.getter(name=\"serviceAccount\") def", "topic. \"\"\" return pulumi.get(self, \"service_account\") @property @pulumi.getter(name=\"streamingConfig\") def streaming_config(self) ->", "1024 characters). 
\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def name(self)", "pulumi.Input[str] service_account: The service account that needs \"pubsub.topics.publish\" permission to", "@property @pulumi.getter(name=\"serviceAccount\") def service_account(self) -> Optional[pulumi.Input[str]]: \"\"\" The service account", "if pubsub_topic is None and not opts.urn: raise TypeError(\"Missing required", "name is not None: pulumi.set(__self__, \"name\", name) if organization is", "is not None: pulumi.set(__self__, \"name\", name) if organization is not", "Optional[pulumi.Input[str]]): pulumi.set(self, \"description\", value) @pulumi.input_type class _NotificationConfigState: def __init__(__self__, *,", "= config_id __props__.__dict__[\"description\"] = description if organization is None and", "Documentation](https://cloud.google.com/security-command-center/docs) ## Example Usage ### Scc Notification Config Basic ```python", "*args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions, *args, **kwargs) if", "**Note:** In order to use Cloud SCC resources, your organization", "id: The unique provider ID of the resource to lookup.", "organization. :param pulumi.Input[str] description: The description of the notification config", "is None: opts.version = _utilities.get_version() if opts.id is None: if", "below. 
\"\"\" return pulumi.get(self, \"streaming_config\") @streaming_config.setter def streaming_config(self, value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]):", "if config_id is not None: pulumi.set(__self__, \"config_id\", config_id) if description", "__props__ is not None: raise TypeError('__props__ is only valid when", "if streaming_config is not None: pulumi.set(__self__, \"streaming_config\", streaming_config) @property @pulumi.getter(name=\"configId\")", "return pulumi.get(self, \"pubsub_topic\") @pubsub_topic.setter def pubsub_topic(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, \"pubsub_topic\",", "organization is None and not opts.urn: raise TypeError(\"Missing required property", "name(self) -> pulumi.Output[str]: \"\"\" The resource name of this notification", "pulumi.set(__self__, \"config_id\", config_id) pulumi.set(__self__, \"organization\", organization) pulumi.set(__self__, \"pubsub_topic\", pubsub_topic) pulumi.set(__self__,", "and filtering NotificationConfig resources. :param pulumi.Input[str] config_id: This must be", "not opts.urn: raise TypeError(\"Missing required property 'streaming_config'\") __props__.__dict__[\"streaming_config\"] = streaming_config", "Do not edit by hand unless you're certain you know", "'organizations/{{organization}}/notificationConfigs/{{config_id}}'. :param pulumi.Input[str] organization: The organization whose Cloud Security Command", "None, streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] = None): \"\"\" Input properties used for", "> **Note:** In order to use Cloud SCC resources, your", "the Pub/Sub topic. 
:param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering", "pubsub_topic(self) -> pulumi.Output[str]: \"\"\" The Pub/Sub topic to send notifications", "def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, config_id:", "import pulumi_gcp as gcp scc_notification = gcp.pubsub.Topic(\"sccNotification\") custom_notification_config = gcp.securitycenter.NotificationConfig(\"customNotificationConfig\",", "streaming_config __props__.__dict__[\"name\"] = None __props__.__dict__[\"service_account\"] = None super(NotificationConfig, __self__).__init__( 'gcp:securitycenter/notificationConfig:NotificationConfig',", "not None: pulumi.set(__self__, \"config_id\", config_id) if description is not None:", "isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions", "NotificationConfigArgs.__new__(NotificationConfigArgs) if config_id is None and not opts.urn: raise TypeError(\"Missing", "triggering streaming-based notifications. Structure is documented below. \"\"\" ... @overload", "notification configs. A notification config is a Cloud SCC resource", "send notifications for create/update events of findings, assets and etc.", "to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param", "@property @pulumi.getter def name(self) -> pulumi.Output[str]: \"\"\" The resource name", "resource creation. To get more information about NotificationConfig, see: *", "triggering streaming-based notifications. Structure is documented below. \"\"\" if config_id", "value: Optional[pulumi.Input[str]]): pulumi.set(self, \"name\", value) @property @pulumi.getter def organization(self) ->", "is only valid when passed in combination with a valid", "for the resource. 
:param pulumi.Input[str] config_id: This must be unique", "_utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name,", "NotificationConfig(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name=\"configId\") def config_id(self) -> pulumi.Output[str]: \"\"\"", "\"config_id\", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: \"\"\" The", "__init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, config_id: Optional[pulumi.Input[str]] =", "\"\"\" Get an existing NotificationConfig resource's state with the given", "A notification config is a Cloud SCC resource that contains", "to get an existing resource') __props__ = NotificationConfigArgs.__new__(NotificationConfigArgs) if config_id", "\"name\", name) if organization is not None: pulumi.set(__self__, \"organization\", organization)", "unique within the organization. \"\"\" return pulumi.get(self, \"config_id\") @config_id.setter def", "NotificationConfig can be imported using any of these accepted formats", "\"\"\" Input properties used for looking up and filtering NotificationConfig", "__props__.__dict__[\"service_account\"] = service_account __props__.__dict__[\"streaming_config\"] = streaming_config return NotificationConfig(resource_name, opts=opts, __props__=__props__)", "use Cloud SCC resources, your organization must be enrolled in", "NotificationConfigArgs: def __init__(__self__, *, config_id: pulumi.Input[str], organization: pulumi.Input[str], pubsub_topic: pulumi.Input[str],", "of 1024 characters). \"\"\" pulumi.set(__self__, \"config_id\", config_id) pulumi.set(__self__, \"organization\", organization)", "for create/update events of findings, assets and etc. 
> **Note:**", "Optional[pulumi.Input[str]]): pulumi.set(self, \"config_id\", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]:" ]
class Agent(metaclass=ABCMeta):
    """Abstract base class for an agent in a multi-agent environment.

    Fix: the original declared ``__metaclass__ = ABCMeta``, which is the
    Python 2 spelling and is completely inert in Python 3 (the file is
    Python 3 code — it uses no-argument ``super().__init__()``).  As a
    result ``@abstractmethod`` on :meth:`act` was never enforced.  Using
    ``metaclass=ABCMeta`` restores the intended contract: concrete
    subclasses must implement ``act``.
    """

    def __init__(self, name, id_, action_num, env):
        # Human-readable agent type (e.g. "stationary", "random").
        self.name = name
        # Index of this agent within the environment's agent list.
        self.id_ = id_
        # Size of this agent's discrete action space.
        self.action_num = action_num  # len(env.action_space[id_])
        # self.opp_action_space = env.action_space[0:id_] + env.action_space[id_:-1]

    def set_pi(self, pi):
        """Install a policy (action distribution) for this agent.

        NOTE(review): the original commented-out assertion referenced
        ``self.actin_num`` — a typo for ``action_num`` (corrected below,
        still left disabled to preserve behavior).
        """
        # assert len(pi) == self.action_num
        self.pi = pi

    def done(self, env):
        """Episode-finished hook; default is a no-op."""
        pass

    @abstractmethod
    def act(self, s, exploration, env):
        """Return an action for state ``s``.

        :param s: current state.
        :param exploration: whether to act with exploration enabled.
        :param env: the environment (for action-space queries etc.).
        """
        pass

    def update(self, s, a, o, r, s2, env):
        """Learning hook: (state, own action, opponent action, reward,
        next state, env).  Default is a no-op for non-learning agents."""
        pass

    @staticmethod
    def format_time(n):
        # Deliberately disabled formatting (humanfriendly dependency removed);
        # kept returning "" to preserve existing behavior.
        return ""
        # s = humanfriendly.format_size(n)
        # return s.replace(' ', '').replace('bytes', '').replace('byte', '').rstrip('B')

    def full_name(self, env):
        """Unique label ``<env>_<name>_<id>`` used for logging/saving."""
        return "{}_{}_{}".format(env.name, self.name, self.id_)
id_, action_num, env, pi=None): super().__init__(\"stationary\",", "act(self, s, exploration, env): if self.verbose: print(\"pi of agent {}:", "assert len(pi) == self.actin_num self.pi = pi def done(self, env):", "print(\"pi of agent {}: {}\".format(self.id_, self.pi)) return StationaryAgent.sample(self.pi) @staticmethod def", "self.id_ = id_ self.action_num = action_num # len(env.action_space[id_]) # self.opp_action_space", "self.pi)) return StationaryAgent.sample(self.pi) @staticmethod def normalize(pi): minprob = np.min(pi) if", "normalize(pi): minprob = np.min(pi) if minprob < 0.0: pi -=", "if pi is None: pi = np.random.dirichlet([1.0] * self.action_num) self.pi", "s, a, o, r, s2, env): pass @staticmethod def format_time(n):", "pass def update(self, s, a, o, r, s2, env): pass", "s, exploration, env): if self.verbose: print(\"pi of agent {}: {}\".format(self.id_,", "-= minprob pi /= np.sum(pi) @staticmethod def sample(pi): return np.random.choice(pi.size,", "self.actin_num self.pi = pi def done(self, env): pass @abstractmethod def", "pi): # assert len(pi) == self.actin_num self.pi = pi def", "StationaryAgent.sample(self.pi) @staticmethod def normalize(pi): minprob = np.min(pi) if minprob <", "minprob pi /= np.sum(pi) @staticmethod def sample(pi): return np.random.choice(pi.size, size=1,", "class Agent(object): __metaclass__ = ABCMeta def __init__(self, name, id_, action_num,", "np.random.choice(pi.size, size=1, p=pi)[0] class RandomAgent(StationaryAgent): def __init__(self, id_, action_num, env):", "np.array(pi, dtype=np.double) StationaryAgent.normalize(self.pi) def act(self, s, exploration, env): if self.verbose:", "minprob = np.min(pi) if minprob < 0.0: pi -= minprob", "Agent(object): __metaclass__ = ABCMeta def __init__(self, name, id_, action_num, env):", "= humanfriendly.format_size(n) # return s.replace(' ', '').replace('bytes', '').replace('byte', '').rstrip('B') def", "# len(env.action_space[id_]) # self.opp_action_space = env.action_space[0:id_] + 
env.action_space[id_:-1] def set_pi(self,", "StationaryAgent(Agent): def __init__(self, id_, action_num, env, pi=None): super().__init__(\"stationary\", id_, action_num,", "def format_time(n): return \"\" # s = humanfriendly.format_size(n) # return", "s, exploration, env): pass def update(self, s, a, o, r,", "env) if pi is None: pi = np.random.dirichlet([1.0] * self.action_num)", "env): pass def update(self, s, a, o, r, s2, env):", "action_num, env, pi=None): super().__init__(\"stationary\", id_, action_num, env) if pi is", "np.random.dirichlet([1.0] * self.action_num) self.pi = np.array(pi, dtype=np.double) StationaryAgent.normalize(self.pi) def act(self,", "def update(self, s, a, o, r, s2, env): pass @staticmethod", "pass @staticmethod def format_time(n): return \"\" # s = humanfriendly.format_size(n)", "action_num # len(env.action_space[id_]) # self.opp_action_space = env.action_space[0:id_] + env.action_space[id_:-1] def", "return StationaryAgent.sample(self.pi) @staticmethod def normalize(pi): minprob = np.min(pi) if minprob", "class RandomAgent(StationaryAgent): def __init__(self, id_, action_num, env): assert action_num >", "# assert len(pi) == self.actin_num self.pi = pi def done(self,", "act(self, s, exploration, env): pass def update(self, s, a, o,", "from abc import ABCMeta, abstractmethod import numpy as np class", "'').replace('byte', '').rstrip('B') def full_name(self, env): return \"{}_{}_{}\".format(env.name, self.name, self.id_) class", "set_pi(self, pi): # assert len(pi) == self.actin_num self.pi = pi", "return s.replace(' ', '').replace('bytes', '').replace('byte', '').rstrip('B') def full_name(self, env): return", "def __init__(self, id_, action_num, env): assert action_num > 0 super().__init__(id_,", "def __init__(self, name, id_, action_num, env): self.name = name self.id_", "id_, action_num, env): self.name = name self.id_ = id_ self.action_num", "# self.opp_action_space = env.action_space[0:id_] + env.action_space[id_:-1] def set_pi(self, 
pi): #", "__init__(self, id_, action_num, env): assert action_num > 0 super().__init__(id_, env,", "as np class Agent(object): __metaclass__ = ABCMeta def __init__(self, name,", "+ env.action_space[id_:-1] def set_pi(self, pi): # assert len(pi) == self.actin_num", "o, r, s2, env): pass @staticmethod def format_time(n): return \"\"", "self.verbose: print(\"pi of agent {}: {}\".format(self.id_, self.pi)) return StationaryAgent.sample(self.pi) @staticmethod", "agent {}: {}\".format(self.id_, self.pi)) return StationaryAgent.sample(self.pi) @staticmethod def normalize(pi): minprob", "assert action_num > 0 super().__init__(id_, env, action_num, pi=[1.0 / action_num]", "self.pi = pi def done(self, env): pass @abstractmethod def act(self,", "__init__(self, id_, action_num, env, pi=None): super().__init__(\"stationary\", id_, action_num, env) if", "abc import ABCMeta, abstractmethod import numpy as np class Agent(object):", "def act(self, s, exploration, env): pass def update(self, s, a,", "env): if self.verbose: print(\"pi of agent {}: {}\".format(self.id_, self.pi)) return", "pi def done(self, env): pass @abstractmethod def act(self, s, exploration,", "env): pass @abstractmethod def act(self, s, exploration, env): pass def", "self.pi = np.array(pi, dtype=np.double) StationaryAgent.normalize(self.pi) def act(self, s, exploration, env):", "{}: {}\".format(self.id_, self.pi)) return StationaryAgent.sample(self.pi) @staticmethod def normalize(pi): minprob =", "action_num, env) if pi is None: pi = np.random.dirichlet([1.0] *", "def sample(pi): return np.random.choice(pi.size, size=1, p=pi)[0] class RandomAgent(StationaryAgent): def __init__(self,", "len(pi) == self.actin_num self.pi = pi def done(self, env): pass", "return np.random.choice(pi.size, size=1, p=pi)[0] class RandomAgent(StationaryAgent): def __init__(self, id_, action_num,", "exploration, env): if self.verbose: print(\"pi of agent {}: {}\".format(self.id_, self.pi))", "== self.actin_num self.pi = pi def 
done(self, env): pass @abstractmethod", "humanfriendly.format_size(n) # return s.replace(' ', '').replace('bytes', '').replace('byte', '').rstrip('B') def full_name(self,", "= id_ self.action_num = action_num # len(env.action_space[id_]) # self.opp_action_space =", "{}\".format(self.id_, self.pi)) return StationaryAgent.sample(self.pi) @staticmethod def normalize(pi): minprob = np.min(pi)", "sample(pi): return np.random.choice(pi.size, size=1, p=pi)[0] class RandomAgent(StationaryAgent): def __init__(self, id_,", "full_name(self, env): return \"{}_{}_{}\".format(env.name, self.name, self.id_) class StationaryAgent(Agent): def __init__(self,", "pi=None): super().__init__(\"stationary\", id_, action_num, env) if pi is None: pi", "name, id_, action_num, env): self.name = name self.id_ = id_", "pi -= minprob pi /= np.sum(pi) @staticmethod def sample(pi): return", "def done(self, env): pass @abstractmethod def act(self, s, exploration, env):", "@staticmethod def normalize(pi): minprob = np.min(pi) if minprob < 0.0:", "def act(self, s, exploration, env): if self.verbose: print(\"pi of agent", "env, pi=None): super().__init__(\"stationary\", id_, action_num, env) if pi is None:", "name self.id_ = id_ self.action_num = action_num # len(env.action_space[id_]) #", "= env.action_space[0:id_] + env.action_space[id_:-1] def set_pi(self, pi): # assert len(pi)", "np class Agent(object): __metaclass__ = ABCMeta def __init__(self, name, id_,", "pi is None: pi = np.random.dirichlet([1.0] * self.action_num) self.pi =", "env, action_num, pi=[1.0 / action_num] * action_num) self.name = \"random\"", "@staticmethod def format_time(n): return \"\" # s = humanfriendly.format_size(n) #", "'').rstrip('B') def full_name(self, env): return \"{}_{}_{}\".format(env.name, self.name, self.id_) class StationaryAgent(Agent):", "0.0: pi -= minprob pi /= np.sum(pi) @staticmethod def sample(pi):", "if self.verbose: print(\"pi of agent {}: {}\".format(self.id_, self.pi)) return 
StationaryAgent.sample(self.pi)", "= ABCMeta def __init__(self, name, id_, action_num, env): self.name =", "pi = np.random.dirichlet([1.0] * self.action_num) self.pi = np.array(pi, dtype=np.double) StationaryAgent.normalize(self.pi)", "class StationaryAgent(Agent): def __init__(self, id_, action_num, env, pi=None): super().__init__(\"stationary\", id_,", "env): return \"{}_{}_{}\".format(env.name, self.name, self.id_) class StationaryAgent(Agent): def __init__(self, id_,", "id_, action_num, env): assert action_num > 0 super().__init__(id_, env, action_num,", "__init__(self, name, id_, action_num, env): self.name = name self.id_ =", "r, s2, env): pass @staticmethod def format_time(n): return \"\" #", "self.action_num = action_num # len(env.action_space[id_]) # self.opp_action_space = env.action_space[0:id_] +", "p=pi)[0] class RandomAgent(StationaryAgent): def __init__(self, id_, action_num, env): assert action_num", "= np.min(pi) if minprob < 0.0: pi -= minprob pi", "self.action_num) self.pi = np.array(pi, dtype=np.double) StationaryAgent.normalize(self.pi) def act(self, s, exploration,", "< 0.0: pi -= minprob pi /= np.sum(pi) @staticmethod def", "__metaclass__ = ABCMeta def __init__(self, name, id_, action_num, env): self.name", "s = humanfriendly.format_size(n) # return s.replace(' ', '').replace('bytes', '').replace('byte', '').rstrip('B')", "/= np.sum(pi) @staticmethod def sample(pi): return np.random.choice(pi.size, size=1, p=pi)[0] class", "StationaryAgent.normalize(self.pi) def act(self, s, exploration, env): if self.verbose: print(\"pi of" ]
[ "if abmap[a]=='' and bamap[b]=='': abmap[a]=b bamap[b]=a elif abmap[a]!=b or bamap[b]!=a:", "str) -> bool: if len(pattern)!=len(str1.split()): return False abmap = defaultdict(str)", "bamap = defaultdict(str) for a,b in zip(pattern, str1.split()): if abmap[a]==''", "dog dog dog\" # Output: false # # Notes: #", "dog\"' # # Given a pattern and a string str,", "= \"aaaa\", str = \"dog cat cat dog\" # Output:", "zip(pattern, str1.split()): if abmap[a]=='' and bamap[b]=='': abmap[a]=b bamap[b]=a elif abmap[a]!=b", "# Likes: 825 # Dislikes: 113 # Total Accepted: 164K", "and a string str, find if str follows the same", "is a bijection between a # letter in pattern and", "letter in pattern and a non-empty word in str. #", "# Dislikes: 113 # Total Accepted: 164K # Total Submissions:", "str = \"dog cat cat dog\" # Output: true #", "Solution: def wordPattern(self, pattern: str, str1: str) -> bool: if", "pattern. # # Here follow means a full match, such", "contains # lowercase letters that may be separated by a", "cat cat dog\" # Output: true # # Example 2:", "Output: true # # Example 2: # # # Input:pattern", "true # # Example 2: # # # Input:pattern =", "bijection between a # letter in pattern and a non-empty", "= defaultdict(str) bamap = defaultdict(str) for a,b in zip(pattern, str1.split()):", "lang=python3 # # [290] Word Pattern # # https://leetcode.com/problems/word-pattern/description/ #", "Total Accepted: 164K # Total Submissions: 455.9K # Testcase Example:", "# Output: false # # Example 3: # # #", "Dislikes: 113 # Total Accepted: 164K # Total Submissions: 455.9K", "\"abba\", str = \"dog cat cat dog\" # Output: true", "a pattern and a string str, find if str follows", "false # # Example 4: # # # Input: pattern", "for a,b in zip(pattern, str1.split()): if abmap[a]=='' and bamap[b]=='': abmap[a]=b", "Output: false # # Example 3: # # # Input:", "Example 3: # # # Input: pattern = \"aaaa\", str", "Output: false # # Notes: # You may assume pattern", "in str. 
# # Example 1: # # # Input:", "bamap[b]=='': abmap[a]=b bamap[b]=a elif abmap[a]!=b or bamap[b]!=a: return False return", "dog\" # Output: true # # Example 2: # #", "# Output: true # # Example 2: # # #", "pattern: str, str1: str) -> bool: if len(pattern)!=len(str1.split()): return False", "Here follow means a full match, such that there is", "# # Example 4: # # # Input: pattern =", "separated by a single space. # # # @lc code=start", "Input: pattern = \"abba\", str = \"dog cat cat dog\"", "# Total Submissions: 455.9K # Testcase Example: '\"abba\"\\n\"dog cat cat", "a,b in zip(pattern, str1.split()): if abmap[a]=='' and bamap[b]=='': abmap[a]=b bamap[b]=a", "str = \"dog cat cat fish\" # Output: false #", "lowercase letters that may be separated by a single space.", "Example 1: # # # Input: pattern = \"abba\", str", "if str follows the same pattern. # # Here follow", "space. # # # @lc code=start from collections import defaultdict", "false # # Notes: # You may assume pattern contains", "such that there is a bijection between a # letter", "# @lc app=leetcode id=290 lang=python3 # # [290] Word Pattern", "2: # # # Input:pattern = \"abba\", str = \"dog", "# # # Input: pattern = \"abba\", str = \"dog", "\"dog cat cat dog\" # Output: false # # Example", "Easy (35.86%) # Likes: 825 # Dislikes: 113 # Total", "= \"abba\", str = \"dog cat cat fish\" # Output:", "cat fish\" # Output: false # # Example 3: #", "# # # @lc code=start from collections import defaultdict class", "in zip(pattern, str1.split()): if abmap[a]=='' and bamap[b]=='': abmap[a]=b bamap[b]=a elif", "# # # Input: pattern = \"aaaa\", str = \"dog", "wordPattern(self, pattern: str, str1: str) -> bool: if len(pattern)!=len(str1.split()): return", "str, find if str follows the same pattern. 
# #", "# Example 4: # # # Input: pattern = \"abba\",", "# # # Input:pattern = \"abba\", str = \"dog cat", "lowercase letters, and str contains # lowercase letters that may", "# Output: false # # Notes: # You may assume", "abmap[a]=b bamap[b]=a elif abmap[a]!=b or bamap[b]!=a: return False return True", "letters, and str contains # lowercase letters that may be", "Accepted: 164K # Total Submissions: 455.9K # Testcase Example: '\"abba\"\\n\"dog", "# Input:pattern = \"abba\", str = \"dog cat cat fish\"", "Output: false # # Example 4: # # # Input:", "# @lc code=start from collections import defaultdict class Solution: def", "between a # letter in pattern and a non-empty word", "= \"dog cat cat dog\" # Output: true # #", "cat cat dog\"' # # Given a pattern and a", "1: # # # Input: pattern = \"abba\", str =", "3: # # # Input: pattern = \"aaaa\", str =", "# letter in pattern and a non-empty word in str.", "contains only lowercase letters, and str contains # lowercase letters", "# # [290] Word Pattern # # https://leetcode.com/problems/word-pattern/description/ # #", "that there is a bijection between a # letter in", "a bijection between a # letter in pattern and a", "str = \"dog cat cat dog\" # Output: false #", "455.9K # Testcase Example: '\"abba\"\\n\"dog cat cat dog\"' # #", "the same pattern. # # Here follow means a full", "# [290] Word Pattern # # https://leetcode.com/problems/word-pattern/description/ # # algorithms", "# # Input: pattern = \"abba\", str = \"dog dog", "\"dog dog dog dog\" # Output: false # # Notes:", "(35.86%) # Likes: 825 # Dislikes: 113 # Total Accepted:", "fish\" # Output: false # # Example 3: # #", "# Example 3: # # # Input: pattern = \"aaaa\",", "# # Given a pattern and a string str, find", "113 # Total Accepted: 164K # Total Submissions: 455.9K #", "\"aaaa\", str = \"dog cat cat dog\" # Output: false", "same pattern. 
# # Here follow means a full match,", "= \"dog cat cat dog\" # Output: false # #", "# Total Accepted: 164K # Total Submissions: 455.9K # Testcase", "cat cat dog\" # Output: false # # Example 4:", "# # Notes: # You may assume pattern contains only", "= \"abba\", str = \"dog cat cat dog\" # Output:", "only lowercase letters, and str contains # lowercase letters that", "import defaultdict class Solution: def wordPattern(self, pattern: str, str1: str)", "dog dog\" # Output: false # # Notes: # You", "str. # # Example 1: # # # Input: pattern", "non-empty word in str. # # Example 1: # #", "Word Pattern # # https://leetcode.com/problems/word-pattern/description/ # # algorithms # Easy", "app=leetcode id=290 lang=python3 # # [290] Word Pattern # #", "# Notes: # You may assume pattern contains only lowercase", "Testcase Example: '\"abba\"\\n\"dog cat cat dog\"' # # Given a", "cat dog\"' # # Given a pattern and a string", "assume pattern contains only lowercase letters, and str contains #", "# Example 2: # # # Input:pattern = \"abba\", str", "# # Example 3: # # # Input: pattern =", "# algorithms # Easy (35.86%) # Likes: 825 # Dislikes:", "and str contains # lowercase letters that may be separated", "Example 4: # # # Input: pattern = \"abba\", str", "# lowercase letters that may be separated by a single", "dog\" # Output: false # # Notes: # You may", "# # @lc code=start from collections import defaultdict class Solution:", "\"abba\", str = \"dog cat cat fish\" # Output: false", "False abmap = defaultdict(str) bamap = defaultdict(str) for a,b in", "# You may assume pattern contains only lowercase letters, and", "[290] Word Pattern # # https://leetcode.com/problems/word-pattern/description/ # # algorithms #", "Notes: # You may assume pattern contains only lowercase letters,", "follow means a full match, such that there is a", "= \"dog dog dog dog\" # Output: false # #", "there is a bijection between a # letter in pattern", "may assume pattern contains only lowercase letters, and 
str contains", "# Here follow means a full match, such that there", "Total Submissions: 455.9K # Testcase Example: '\"abba\"\\n\"dog cat cat dog\"'", "# # Example 1: # # # Input: pattern =", "id=290 lang=python3 # # [290] Word Pattern # # https://leetcode.com/problems/word-pattern/description/", "# Output: false # # Example 4: # # #", "str, str1: str) -> bool: if len(pattern)!=len(str1.split()): return False abmap", "may be separated by a single space. # # #", "# # @lc app=leetcode id=290 lang=python3 # # [290] Word", "Likes: 825 # Dislikes: 113 # Total Accepted: 164K #", "Given a pattern and a string str, find if str", "# # Input: pattern = \"abba\", str = \"dog cat", "4: # # # Input: pattern = \"abba\", str =", "collections import defaultdict class Solution: def wordPattern(self, pattern: str, str1:", "pattern and a string str, find if str follows the", "word in str. # # Example 1: # # #", "Submissions: 455.9K # Testcase Example: '\"abba\"\\n\"dog cat cat dog\"' #", "letters that may be separated by a single space. #", "false # # Example 3: # # # Input: pattern", "\"dog cat cat dog\" # Output: true # # Example", "be separated by a single space. # # # @lc", "defaultdict(str) bamap = defaultdict(str) for a,b in zip(pattern, str1.split()): if", "a full match, such that there is a bijection between", "single space. # # # @lc code=start from collections import", "Example: '\"abba\"\\n\"dog cat cat dog\"' # # Given a pattern", "a non-empty word in str. # # Example 1: #", "defaultdict class Solution: def wordPattern(self, pattern: str, str1: str) ->", "bamap[b]=a elif abmap[a]!=b or bamap[b]!=a: return False return True #", "825 # Dislikes: 113 # Total Accepted: 164K # Total", "that may be separated by a single space. 
# #", "bool: if len(pattern)!=len(str1.split()): return False abmap = defaultdict(str) bamap =", "abmap[a]!=b or bamap[b]!=a: return False return True # @lc code=end", "-> bool: if len(pattern)!=len(str1.split()): return False abmap = defaultdict(str) bamap", "str1.split()): if abmap[a]=='' and bamap[b]=='': abmap[a]=b bamap[b]=a elif abmap[a]!=b or", "# # Here follow means a full match, such that", "match, such that there is a bijection between a #", "str = \"dog dog dog dog\" # Output: false #", "Input: pattern = \"abba\", str = \"dog dog dog dog\"", "pattern = \"abba\", str = \"dog dog dog dog\" #", "You may assume pattern contains only lowercase letters, and str", "code=start from collections import defaultdict class Solution: def wordPattern(self, pattern:", "cat dog\" # Output: true # # Example 2: #", "# Testcase Example: '\"abba\"\\n\"dog cat cat dog\"' # # Given", "means a full match, such that there is a bijection", "str contains # lowercase letters that may be separated by", "string str, find if str follows the same pattern. #", "\"dog cat cat fish\" # Output: false # # Example", "len(pattern)!=len(str1.split()): return False abmap = defaultdict(str) bamap = defaultdict(str) for", "# # Example 2: # # # Input:pattern = \"abba\",", "@lc app=leetcode id=290 lang=python3 # # [290] Word Pattern #", "# Example 1: # # # Input: pattern = \"abba\",", "and a non-empty word in str. 
# # Example 1:", "Input: pattern = \"aaaa\", str = \"dog cat cat dog\"", "# https://leetcode.com/problems/word-pattern/description/ # # algorithms # Easy (35.86%) # Likes:", "abmap = defaultdict(str) bamap = defaultdict(str) for a,b in zip(pattern,", "full match, such that there is a bijection between a", "cat cat fish\" # Output: false # # Example 3:", "# Given a pattern and a string str, find if", "= \"dog cat cat fish\" # Output: false # #", "# Input: pattern = \"abba\", str = \"dog dog dog", "elif abmap[a]!=b or bamap[b]!=a: return False return True # @lc", "pattern = \"aaaa\", str = \"dog cat cat dog\" #", "str follows the same pattern. # # Here follow means", "# Input: pattern = \"abba\", str = \"dog cat cat", "# Easy (35.86%) # Likes: 825 # Dislikes: 113 #", "# # algorithms # Easy (35.86%) # Likes: 825 #", "a single space. # # # @lc code=start from collections", "dog\" # Output: false # # Example 4: # #", "from collections import defaultdict class Solution: def wordPattern(self, pattern: str,", "= defaultdict(str) for a,b in zip(pattern, str1.split()): if abmap[a]=='' and", "def wordPattern(self, pattern: str, str1: str) -> bool: if len(pattern)!=len(str1.split()):", "if len(pattern)!=len(str1.split()): return False abmap = defaultdict(str) bamap = defaultdict(str)", "defaultdict(str) for a,b in zip(pattern, str1.split()): if abmap[a]=='' and bamap[b]=='':", "and bamap[b]=='': abmap[a]=b bamap[b]=a elif abmap[a]!=b or bamap[b]!=a: return False", "https://leetcode.com/problems/word-pattern/description/ # # algorithms # Easy (35.86%) # Likes: 825", "a string str, find if str follows the same pattern.", "# # https://leetcode.com/problems/word-pattern/description/ # # algorithms # Easy (35.86%) #", "# # Input:pattern = \"abba\", str = \"dog cat cat", "a # letter in pattern and a non-empty word in", "Pattern # # https://leetcode.com/problems/word-pattern/description/ # # algorithms # Easy (35.86%)", "pattern = \"abba\", str = \"dog cat cat dog\" #", "= \"abba\", 
str = \"dog dog dog dog\" # Output:", "class Solution: def wordPattern(self, pattern: str, str1: str) -> bool:", "# Input: pattern = \"aaaa\", str = \"dog cat cat", "algorithms # Easy (35.86%) # Likes: 825 # Dislikes: 113", "164K # Total Submissions: 455.9K # Testcase Example: '\"abba\"\\n\"dog cat", "Example 2: # # # Input:pattern = \"abba\", str =", "\"abba\", str = \"dog dog dog dog\" # Output: false", "by a single space. # # # @lc code=start from", "@lc code=start from collections import defaultdict class Solution: def wordPattern(self,", "in pattern and a non-empty word in str. # #", "find if str follows the same pattern. # # Here", "follows the same pattern. # # Here follow means a", "'\"abba\"\\n\"dog cat cat dog\"' # # Given a pattern and", "# # Input: pattern = \"aaaa\", str = \"dog cat", "pattern contains only lowercase letters, and str contains # lowercase", "str1: str) -> bool: if len(pattern)!=len(str1.split()): return False abmap =", "abmap[a]=='' and bamap[b]=='': abmap[a]=b bamap[b]=a elif abmap[a]!=b or bamap[b]!=a: return", "cat dog\" # Output: false # # Example 4: #", "Input:pattern = \"abba\", str = \"dog cat cat fish\" #", "return False abmap = defaultdict(str) bamap = defaultdict(str) for a,b", "pattern and a non-empty word in str. # # Example" ]
[ "= nn.Sequential(nn.Conv2d(in_channels=1, out_channels=5, kernel_size=3), nn.ReLU(), nn.Conv2d(in_channels=5, out_channels=3, kernel_size=3, stride=2) )", "self.cnn = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=5, kernel_size=3), nn.ReLU(), nn.Conv2d(in_channels=5, out_channels=3, kernel_size=3, stride=2)", ") def forward(self, x): x = self.cnn(x).view(x.size(0), -1) return self.fc(x)", "MyAwesomeModel(nn.Module): def __init__(self): super().__init__() self.cnn = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=5, kernel_size=3), nn.ReLU(),", "kernel_size=3), nn.ReLU(), nn.Conv2d(in_channels=5, out_channels=3, kernel_size=3, stride=2) ) self.fc = nn.Sequential(nn.Linear(432,", "nn.LogSoftmax(dim=1) ) def forward(self, x): x = self.cnn(x).view(x.size(0), -1) return", "from torch import nn class MyAwesomeModel(nn.Module): def __init__(self): super().__init__() self.cnn", "def __init__(self): super().__init__() self.cnn = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=5, kernel_size=3), nn.ReLU(), nn.Conv2d(in_channels=5,", "nn.Sequential(nn.Conv2d(in_channels=1, out_channels=5, kernel_size=3), nn.ReLU(), nn.Conv2d(in_channels=5, out_channels=3, kernel_size=3, stride=2) ) self.fc", "nn.ReLU(), nn.Conv2d(in_channels=5, out_channels=3, kernel_size=3, stride=2) ) self.fc = nn.Sequential(nn.Linear(432, 100),", "class MyAwesomeModel(nn.Module): def __init__(self): super().__init__() self.cnn = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=5, kernel_size=3),", "import nn class MyAwesomeModel(nn.Module): def __init__(self): super().__init__() self.cnn = nn.Sequential(nn.Conv2d(in_channels=1,", "self.fc = nn.Sequential(nn.Linear(432, 100), nn.ReLU(), nn.Linear(100,10), nn.LogSoftmax(dim=1) ) def forward(self,", "super().__init__() self.cnn = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=5, kernel_size=3), nn.ReLU(), nn.Conv2d(in_channels=5, out_channels=3, kernel_size=3,", "out_channels=3, kernel_size=3, stride=2) ) self.fc = 
nn.Sequential(nn.Linear(432, 100), nn.ReLU(), nn.Linear(100,10),", "nn class MyAwesomeModel(nn.Module): def __init__(self): super().__init__() self.cnn = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=5,", "stride=2) ) self.fc = nn.Sequential(nn.Linear(432, 100), nn.ReLU(), nn.Linear(100,10), nn.LogSoftmax(dim=1) )", "= nn.Sequential(nn.Linear(432, 100), nn.ReLU(), nn.Linear(100,10), nn.LogSoftmax(dim=1) ) def forward(self, x):", "kernel_size=3, stride=2) ) self.fc = nn.Sequential(nn.Linear(432, 100), nn.ReLU(), nn.Linear(100,10), nn.LogSoftmax(dim=1)", "nn.Linear(100,10), nn.LogSoftmax(dim=1) ) def forward(self, x): x = self.cnn(x).view(x.size(0), -1)", "out_channels=5, kernel_size=3), nn.ReLU(), nn.Conv2d(in_channels=5, out_channels=3, kernel_size=3, stride=2) ) self.fc =", "__init__(self): super().__init__() self.cnn = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=5, kernel_size=3), nn.ReLU(), nn.Conv2d(in_channels=5, out_channels=3,", "100), nn.ReLU(), nn.Linear(100,10), nn.LogSoftmax(dim=1) ) def forward(self, x): x =", "nn.ReLU(), nn.Linear(100,10), nn.LogSoftmax(dim=1) ) def forward(self, x): x = self.cnn(x).view(x.size(0),", "torch import nn class MyAwesomeModel(nn.Module): def __init__(self): super().__init__() self.cnn =", ") self.fc = nn.Sequential(nn.Linear(432, 100), nn.ReLU(), nn.Linear(100,10), nn.LogSoftmax(dim=1) ) def", "nn.Sequential(nn.Linear(432, 100), nn.ReLU(), nn.Linear(100,10), nn.LogSoftmax(dim=1) ) def forward(self, x): x", "nn.Conv2d(in_channels=5, out_channels=3, kernel_size=3, stride=2) ) self.fc = nn.Sequential(nn.Linear(432, 100), nn.ReLU()," ]
[ "be zero\") self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(math.isnan(v),True, \"should", "\"should be zero\") self.assertEqual(bc[1], 1, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v,", "self.assertEqual(v, True, \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, False, \"should be", "\"1\", \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"0\", \"should be zero\")", "self.assertEqual(v, \"1\", \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"\", \"should be", "self.assertEqual(bc[0][0], True, \"should be zero\") self.assertEqual(bc[0][1], False, \"should be one\")", "v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"a\", \"should be zero\") pass def test_categorical_str2(self): a=np.array([\"\",\"b\",\"c\",\"b\"])", "bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for i in range(len(a)):", "bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][2], True, \"should be zero\") self.assertEqual(bc[0][1], False, \"should", "\"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, 2, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1]))", "be zero\") pass def test_binary_bool(self): a=np.array([True,False,True,False]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 1,", "be zero\") pass def test_categorical_num(self): a=np.array([0,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True,", "v=bc._decode(np.array([0.2])) self.assertEqual(v, False, \"should be zero\") pass def test_categorical_num(self): a=np.array([0,1,2,1])", "be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"\", \"should be zero\") pass def", "a=np.array([\"1 2\",\"0 2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] 
self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for", "test_binary_num(self): a=np.array([0,1,0,1]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be zero\") self.assertEqual(bc[1],", "\"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\", \"should be one\") v=bc._decode(np.array([0.2]))", "one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"a\", \"should be zero\") pass def test_categorical_str2(self):", "self.assertEqual(v, False, \"should be zero\") pass def test_categorical_num(self): a=np.array([0,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a,", "3,\"Fixing format\") for i in range(len(a)): val=bc[i] r=bc._decode(val) self.assertEqual(r, a[i],", "as pd import os import math fl=__file__ fl=os.path.dirname(fl) class TestCoders(unittest.TestCase):", "a=np.array([\"1_2\",\"0_2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for i in", "be zero\") self.assertEqual(bc[1], 1, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, 1,", "one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, 0, \"should be zero\") pass def test_binary_str(self):", "be zero\") self.assertEqual(bc[1], 0, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, True,", "be zero\") self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, 2,", "self.assertEqual(bc[0][2], True, \"should be zero\") self.assertEqual(bc[0][1], False, \"should be one\")", "\"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, True, \"should be one\") v=bc._decode(np.array([0.2]))", "range(len(a)): val=bc[i] r=bc._decode(val) self.assertEqual(r, a[i], \"Decoding should work also\") pass", "also\") pass def test_multiclass2(self): a=np.array([\"1\",\"\",\"\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) 
val=bc[0] self.assertEqual((val==np.array([True])).sum(), 1,\"Fixing", "be zero\") pass def test_binary_str2(self): a=np.array([\"\",\"1\",\"\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0,", "a=np.array([\"1\",\"\",\"\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([True])).sum(), 1,\"Fixing format\") for i in", "\"should be zero\") pass def test_categorical_str(self): a=np.array([\"a\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0],", "2\",\"0 2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for i", "be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v,", "one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\", \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"0\",", "def test_binary_str2(self): a=np.array([\"\",\"1\",\"\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be zero\")", "\"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"\", \"should be zero\")", "\"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 1, \"should be zero\") pass", "be zero\") pass def test_categorical_pd(self): a=np.array([math.nan,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][2], True,", "v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 0, \"should be zero\") pass def test_categorical_str(self): a=np.array([\"a\",\"b\",\"c\",\"b\"])", "musket_core import coders import numpy as np import pandas as", "should work also\") pass def test_multiclass1(self): a=np.array([\"1_2\",\"0_2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0]", "test_multiclass(self): a=np.array([\"1 
2\",\"0 2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\")", "0, \"should be zero\") pass def test_binary_str(self): a=np.array([\"0\",\"1\",\"0\",\"1\"]) bc=coders.get_coder(\"binary\",a, None)", "one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, False, \"should be zero\") pass def test_categorical_num(self):", "r=bc._decode(val) self.assertEqual(r, a[i], \"Decoding should work also\") pass def test_multiclass2(self):", "1,\"Fixing format\") for i in range(len(a)): val=bc[i] r=bc._decode(val) self.assertEqual(r, a[i],", "1, \"should be zero\") pass def test_multiclass(self): a=np.array([\"1 2\",\"0 2\",\"0\",\"\"])", "self.assertEqual(bc[0], 1, \"should be zero\") self.assertEqual(bc[1], 0, \"should be one\")", "val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for i in range(len(a)): val=bc[i] r=bc._decode(val)", "as np import pandas as pd import os import math", "\"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, 0, \"should be zero\") pass", "\"a\", \"should be zero\") pass def test_categorical_str2(self): a=np.array([\"\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None)", "None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for i in range(len(a)): val=bc[i]", "None) self.assertEqual(bc[0], 1, \"should be zero\") self.assertEqual(bc[1], 0, \"should be", "work also\") pass def test_multiclass1(self): a=np.array([\"1_2\",\"0_2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(),", "self.assertEqual(v, 0, \"should be zero\") pass def test_categorical_str(self): a=np.array([\"a\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a,", "True, \"should be zero\") self.assertEqual(bc[0][1], False, \"should be one\") 
v=bc._decode(np.array([0.3,0.4,0.45]))", "None) self.assertEqual(bc[0][2], True, \"should be zero\") self.assertEqual(bc[0][1], False, \"should be", "def test_categorical_str(self): a=np.array([\"a\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\")", "self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, 2, \"should be", "self.assertEqual(v, \"0\", \"should be zero\") pass def test_binary_str2(self): a=np.array([\"\",\"1\",\"\",\"1\"]) bc=coders.get_coder(\"binary\",a,", "be zero\") self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\",", "False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\", \"should be one\")", "def test_multiclass2(self): a=np.array([\"1\",\"\",\"\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([True])).sum(), 1,\"Fixing format\") for", "test_multiclass2(self): a=np.array([\"1\",\"\",\"\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([True])).sum(), 1,\"Fixing format\") for i", "numpy as np import pandas as pd import os import", "\"\", \"should be zero\") pass def test_categorical_pd(self): a=np.array([math.nan,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None)", "v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 1, \"should be zero\") pass def test_multiclass(self): a=np.array([\"1", "v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"\", \"should be zero\") pass def test_categorical_pd(self): a=np.array([math.nan,1,2,1])", "a=np.array([\"\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\") self.assertEqual(bc[0][1], False,", "\"should be zero\") self.assertEqual(bc[0][1], False, \"should be one\") 
v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(math.isnan(v),True,", "0, \"should be zero\") self.assertEqual(bc[1], 1, \"should be one\") v=bc._decode(np.array([0.6]))", "one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, 1, \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, 0,", "be zero\") self.assertEqual(bc[1], 1, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\",", "be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, 1, \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v,", "\"0\", \"should be zero\") pass def test_binary_str2(self): a=np.array([\"\",\"1\",\"\",\"1\"]) bc=coders.get_coder(\"binary\",a, None)", "os import math fl=__file__ fl=os.path.dirname(fl) class TestCoders(unittest.TestCase): def test_binary_num(self): a=np.array([0,1,0,1])", "self.assertEqual(v, \"\", \"should be zero\") pass def test_binary_bool(self): a=np.array([True,False,True,False]) bc=coders.get_coder(\"binary\",a,", "True, \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, False, \"should be zero\")", "self.assertEqual(v, \"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"\", \"should be", "2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for i in", "a=np.array([\"a\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\") self.assertEqual(bc[0][1], False,", "be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"0\", \"should be zero\") pass def", "\"should be zero\") pass def test_binary_str(self): a=np.array([\"0\",\"1\",\"0\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0],", "format\") for i in range(len(a)): val=bc[i] r=bc._decode(val) self.assertEqual(r, a[i], \"Decoding", "for i in range(len(a)): val=bc[i] r=bc._decode(val) 
self.assertEqual(r, a[i], \"Decoding should", "\"Decoding should work also\") pass def test_multiclass2(self): a=np.array([\"1\",\"\",\"\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None)", "\"should be zero\") pass def test_categorical_pd(self): a=np.array([math.nan,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][2],", "fl=__file__ fl=os.path.dirname(fl) class TestCoders(unittest.TestCase): def test_binary_num(self): a=np.array([0,1,0,1]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0],", "a=np.array([0,1,0,1]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be zero\") self.assertEqual(bc[1], 1,", "self.assertEqual(v, \"1\", \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"0\", \"should be", "one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, True, \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, False,", "\"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"\", \"should be zero\") pass", "v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\", \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"\", \"should", "self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for i in range(len(a)): val=bc[i] r=bc._decode(val) self.assertEqual(r,", "v=bc._decode(np.array([0.6])) self.assertEqual(v, 1, \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, 0, \"should", "\"Decoding should work also\") pass def test_multiclass1(self): a=np.array([\"1_2\",\"0_2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None)", "be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 1, \"should be zero\") pass def", "v=bc._decode(np.array([0.2])) self.assertEqual(v, 0, \"should be zero\") pass def test_binary_str(self): a=np.array([\"0\",\"1\",\"0\",\"1\"])", "one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, 2, \"should be one\") 
v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 0,", "def test_categorical_pd(self): a=np.array([math.nan,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][2], True, \"should be zero\")", "self.assertEqual(math.isnan(v),True, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 1, \"should be zero\")", "one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 1, \"should be zero\") pass def test_multiclass(self):", "None) val=bc[0] self.assertEqual((val==np.array([True])).sum(), 1,\"Fixing format\") for i in range(len(a)): val=bc[i]", "\"should be zero\") self.assertEqual(bc[1], 0, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v,", "pass def test_binary_str(self): a=np.array([\"0\",\"1\",\"0\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be", "be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"a\", \"should be zero\") pass def", "zero\") pass def test_categorical_num(self): a=np.array([0,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should", "zero\") pass def test_multiclass(self): a=np.array([\"1 2\",\"0 2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0]", "\"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"\", \"should be zero\") pass", "test_categorical_str2(self): a=np.array([\"\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\") self.assertEqual(bc[0][1],", "be zero\") pass def test_multiclass(self): a=np.array([\"1 2\",\"0 2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None)", "test_binary_str2(self): a=np.array([\"\",\"1\",\"\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be zero\") self.assertEqual(bc[1],", "be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, True, \"should be 
one\") v=bc._decode(np.array([0.2])) self.assertEqual(v,", "also\") pass def test_multiclass1(self): a=np.array([\"1_2\",\"0_2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing", "test_multiclass1(self): a=np.array([\"1_2\",\"0_2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for i", "def test_binary_str(self): a=np.array([\"0\",\"1\",\"0\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be zero\")", "\"should be zero\") pass def test_binary_bool(self): a=np.array([True,False,True,False]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0],", "import math fl=__file__ fl=os.path.dirname(fl) class TestCoders(unittest.TestCase): def test_binary_num(self): a=np.array([0,1,0,1]) bc=coders.get_coder(\"binary\",a,", "pandas as pd import os import math fl=__file__ fl=os.path.dirname(fl) class", "zero\") self.assertEqual(bc[1], 0, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, True, \"should", "\"should be zero\") pass def test_multiclass(self): a=np.array([\"1 2\",\"0 2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a,", "v=bc._decode(np.array([0.2])) self.assertEqual(v, \"\", \"should be zero\") pass def test_binary_bool(self): a=np.array([True,False,True,False])", "bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be zero\") self.assertEqual(bc[1], 1, \"should", "def test_binary_num(self): a=np.array([0,1,0,1]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be zero\")", "self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\", \"should be", "\"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"0\", \"should be zero\") pass", "a=np.array([True,False,True,False]) 
bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 1, \"should be zero\") self.assertEqual(bc[1], 0,", "self.assertEqual(r, a[i], \"Decoding should work also\") pass def test_multiclass1(self): a=np.array([\"1_2\",\"0_2\",\"0\",\"\"])", "bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([True])).sum(), 1,\"Fixing format\") for i in range(len(a)):", "one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"\", \"should be zero\") pass def test_categorical_pd(self):", "self.assertEqual(v, \"\", \"should be zero\") pass def test_categorical_pd(self): a=np.array([math.nan,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a,", "pass def test_binary_str2(self): a=np.array([\"\",\"1\",\"\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be", "v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\", \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"0\", \"should", "0, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, True, \"should be one\")", "False, \"should be zero\") pass def test_categorical_num(self): a=np.array([0,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None)", "self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(math.isnan(v),True, \"should be one\")", "self.assertEqual(bc[0], 0, \"should be zero\") self.assertEqual(bc[1], 1, \"should be one\")", "should work also\") pass def test_multiclass2(self): a=np.array([\"1\",\"\",\"\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0]", "import coders import numpy as np import pandas as pd", "i in range(len(a)): val=bc[i] r=bc._decode(val) self.assertEqual(r, a[i], \"Decoding should work", "\"should be zero\") pass def test_categorical_str2(self): a=np.array([\"\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0],", 
"bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\") self.assertEqual(bc[0][1], False, \"should", "bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 1, \"should be zero\") self.assertEqual(bc[1], 0, \"should", "be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 0, \"should be zero\") pass def", "pass def test_categorical_str2(self): a=np.array([\"\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be", "in range(len(a)): val=bc[i] r=bc._decode(val) self.assertEqual(r, a[i], \"Decoding should work also\")", "2, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 0, \"should be zero\")", "v=bc._decode(np.array([0.2])) self.assertEqual(v, \"0\", \"should be zero\") pass def test_binary_str2(self): a=np.array([\"\",\"1\",\"\",\"1\"])", "one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(math.isnan(v),True, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 1, \"should", "a=np.array([0,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\") self.assertEqual(bc[0][1], False,", "pd import os import math fl=__file__ fl=os.path.dirname(fl) class TestCoders(unittest.TestCase): def", "a=np.array([\"\",\"1\",\"\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be zero\") self.assertEqual(bc[1], 1,", "v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"\", \"should", "\"should be zero\") self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v,", "zero\") self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\", \"should", "unittest from musket_core import coders import 
numpy as np import", "\"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"a\", \"should be zero\") pass", "False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, 2, \"should be one\")", "1, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, 1, \"should be one\")", "\"should be zero\") pass def test_binary_str2(self): a=np.array([\"\",\"1\",\"\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0],", "v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(math.isnan(v),True, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 1, \"should be", "import os import math fl=__file__ fl=os.path.dirname(fl) class TestCoders(unittest.TestCase): def test_binary_num(self):", "import pandas as pd import os import math fl=__file__ fl=os.path.dirname(fl)", "self.assertEqual(v, \"a\", \"should be zero\") pass def test_categorical_str2(self): a=np.array([\"\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a,", "False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(math.isnan(v),True, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1]))", "pass def test_multiclass1(self): a=np.array([\"1_2\",\"0_2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\")", "be zero\") pass def test_binary_str(self): a=np.array([\"0\",\"1\",\"0\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0,", "work also\") pass def test_multiclass2(self): a=np.array([\"1\",\"\",\"\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([True])).sum(),", "pass def test_binary_bool(self): a=np.array([True,False,True,False]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 1, \"should be", "a=np.array([\"0\",\"1\",\"0\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) 
self.assertEqual(bc[0], 0, \"should be zero\") self.assertEqual(bc[1], 1,", "zero\") self.assertEqual(bc[1], 1, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\", \"should", "val=bc[0] self.assertEqual((val==np.array([True])).sum(), 1,\"Fixing format\") for i in range(len(a)): val=bc[i] r=bc._decode(val)", "\"1\", \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"\", \"should be zero\")", "np import pandas as pd import os import math fl=__file__", "def test_multiclass1(self): a=np.array([\"1_2\",\"0_2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing format\") for", "pass def test_categorical_num(self): a=np.array([0,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be", "class TestCoders(unittest.TestCase): def test_binary_num(self): a=np.array([0,1,0,1]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should", "self.assertEqual(v, 0, \"should be zero\") pass def test_binary_str(self): a=np.array([\"0\",\"1\",\"0\",\"1\"]) bc=coders.get_coder(\"binary\",a,", "import unittest from musket_core import coders import numpy as np", "math fl=__file__ fl=os.path.dirname(fl) class TestCoders(unittest.TestCase): def test_binary_num(self): a=np.array([0,1,0,1]) bc=coders.get_coder(\"binary\",a, None)", "0, \"should be zero\") pass def test_categorical_str(self): a=np.array([\"a\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None)", "pass def test_multiclass(self): a=np.array([\"1 2\",\"0 2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(),", "a[i], \"Decoding should work also\") pass def test_multiclass2(self): a=np.array([\"1\",\"\",\"\",\"\"]) bc=coders.get_coder(\"multi_class\",a,", "self.assertEqual((val==np.array([True])).sum(), 1,\"Fixing format\") for i in 
range(len(a)): val=bc[i] r=bc._decode(val) self.assertEqual(r,", "be zero\") pass def test_categorical_str2(self): a=np.array([\"\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True,", "\"\", \"should be zero\") pass def test_binary_bool(self): a=np.array([True,False,True,False]) bc=coders.get_coder(\"binary\",a, None)", "\"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1]))", "TestCoders(unittest.TestCase): def test_binary_num(self): a=np.array([0,1,0,1]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be", "zero\") pass def test_binary_str2(self): a=np.array([\"\",\"1\",\"\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should", "\"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"a\", \"should be zero\")", "self.assertEqual(v, 1, \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, 0, \"should be", "test_binary_str(self): a=np.array([\"0\",\"1\",\"0\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should be zero\") self.assertEqual(bc[1],", "None) self.assertEqual(bc[0], 0, \"should be zero\") self.assertEqual(bc[1], 1, \"should be", "import numpy as np import pandas as pd import os", "one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"0\", \"should be zero\") pass def test_binary_str2(self):", "pass def test_categorical_str(self): a=np.array([\"a\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be", "zero\") self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(math.isnan(v),True, \"should be", "be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"\", \"should be zero\") pass def", "zero\") pass def test_categorical_pd(self): 
a=np.array([math.nan,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][2], True, \"should", "test_categorical_pd(self): a=np.array([math.nan,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][2], True, \"should be zero\") self.assertEqual(bc[0][1],", "one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"a\",", "zero\") pass def test_categorical_str2(self): a=np.array([\"\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should", "test_binary_bool(self): a=np.array([True,False,True,False]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 1, \"should be zero\") self.assertEqual(bc[1],", "self.assertEqual(bc[1], 1, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\", \"should be", "test_categorical_str(self): a=np.array([\"a\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\") self.assertEqual(bc[0][1],", "be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, False, \"should be zero\") pass def", "self.assertEqual(v, \"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"a\", \"should be", "pass def test_multiclass2(self): a=np.array([\"1\",\"\",\"\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([True])).sum(), 1,\"Fixing format\")", "be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, 2, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v,", "self.assertEqual(v, 1, \"should be zero\") pass def test_multiclass(self): a=np.array([\"1 2\",\"0", "v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"a\", \"should", 
"self.assertEqual(v, 2, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 0, \"should be", "one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"\", \"should be zero\") pass def test_binary_bool(self):", "zero\") pass def test_categorical_str(self): a=np.array([\"a\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should", "def test_multiclass(self): a=np.array([\"1 2\",\"0 2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a, None) val=bc[0] self.assertEqual((val==np.array([False,True,True])).sum(), 3,\"Fixing", "r=bc._decode(val) self.assertEqual(r, a[i], \"Decoding should work also\") pass def test_multiclass1(self):", "be zero\") pass def test_categorical_str(self): a=np.array([\"a\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True,", "from musket_core import coders import numpy as np import pandas", "def test_categorical_num(self): a=np.array([0,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\")", "be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\", \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v,", "one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, \"c\", \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, \"\",", "zero\") self.assertEqual(bc[0][1], False, \"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, 2, \"should", "zero\") self.assertEqual(bc[1], 1, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, 1, \"should", "be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(math.isnan(v),True, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 1,", "v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(v, 2, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 0, 
\"should", "self.assertEqual(bc[1], 1, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, 1, \"should be", "one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 0, \"should be zero\") pass def test_categorical_str(self):", "1, \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, 0, \"should be zero\")", "1, \"should be zero\") self.assertEqual(bc[1], 0, \"should be one\") v=bc._decode(np.array([0.6]))", "\"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, 1, \"should be one\") v=bc._decode(np.array([0.2]))", "a[i], \"Decoding should work also\") pass def test_multiclass1(self): a=np.array([\"1_2\",\"0_2\",\"0\",\"\"]) bc=coders.get_coder(\"multi_class\",a,", "\"should be zero\") pass def test_categorical_num(self): a=np.array([0,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0],", "zero\") pass def test_binary_str(self): a=np.array([\"0\",\"1\",\"0\",\"1\"]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0, \"should", "1, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\", \"should be one\")", "def test_binary_bool(self): a=np.array([True,False,True,False]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 1, \"should be zero\")", "def test_categorical_str2(self): a=np.array([\"\",\"b\",\"c\",\"b\"]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\")", "coders import numpy as np import pandas as pd import", "self.assertEqual(bc[1], 0, \"should be one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, True, \"should be", "\"should be one\") v=bc._decode(np.array([0.3,0.4,0.45])) self.assertEqual(math.isnan(v),True, \"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v,", "None) self.assertEqual(bc[0][0], True, \"should be zero\") self.assertEqual(bc[0][1], False, \"should be", "zero\") pass def test_binary_bool(self): 
a=np.array([True,False,True,False]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 1, \"should", "test_categorical_num(self): a=np.array([0,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][0], True, \"should be zero\") self.assertEqual(bc[0][1],", "pass def test_categorical_pd(self): a=np.array([math.nan,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][2], True, \"should be", "val=bc[i] r=bc._decode(val) self.assertEqual(r, a[i], \"Decoding should work also\") pass def", "a=np.array([math.nan,1,2,1]) bc=coders.get_coder(\"categorical_one_hot\",a, None) self.assertEqual(bc[0][2], True, \"should be zero\") self.assertEqual(bc[0][1], False,", "v=bc._decode(np.array([0.6])) self.assertEqual(v, True, \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, False, \"should", "self.assertEqual(r, a[i], \"Decoding should work also\") pass def test_multiclass2(self): a=np.array([\"1\",\"\",\"\",\"\"])", "be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, 0, \"should be zero\") pass def", "\"should be one\") v=bc._decode(np.array([0.2,0.1,0.1])) self.assertEqual(v, 0, \"should be zero\") pass", "\"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, False, \"should be zero\") pass", "fl=os.path.dirname(fl) class TestCoders(unittest.TestCase): def test_binary_num(self): a=np.array([0,1,0,1]) bc=coders.get_coder(\"binary\",a, None) self.assertEqual(bc[0], 0,", "one\") v=bc._decode(np.array([0.6])) self.assertEqual(v, \"1\", \"should be one\") v=bc._decode(np.array([0.2])) self.assertEqual(v, \"\"," ]
[ "len(sys.argv) > 1: sys.argv.append(\"--traceback\") try: execute_from_command_line(sys.argv) except CommandError as e:", "/etc/zulip/zulip.conf (in # which case it's a production server, not", "in sys.argv and len(sys.argv) > 1: sys.argv.append(\"--traceback\") try: execute_from_command_line(sys.argv) except", "root. Use `su zulip` to drop root.\") sys.exit(1) if (os.access('/etc/zulip/zulip.conf',", "django.core.management.base import CommandError from scripts.lib.zulip_tools import log_management_command log_management_command(\" \".join(sys.argv), settings.MANAGEMENT_LOG_PATH)", "lack of access for /etc/zulip/zulip-secrets.conf (which # should be only", "another user in # production before importing anything that would", "before importing anything that would require that # access is", "import scripts.lib.setup_path_on_import if __name__ == \"__main__\": if 'posix' in os.name", "os.R_OK)): # The best way to detect running manage.py as", "\"scripts/lib/pythonrc.py\")) if \"--no-traceback\" not in sys.argv and len(sys.argv) > 1:", "zulip) print(\"Error accessing Zulip secrets; manage.py in production must be", "that # access is to check for access to /etc/zulip/zulip.conf", "sys.argv.append(\"--traceback\") try: execute_from_command_line(sys.argv) except CommandError as e: print(e, file=sys.stderr) sys.exit(1)", "os import sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) import scripts.lib.setup_path_on_import if", "as the zulip user.\") sys.exit(1) os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"zproject.settings\") from django.conf import", "sys.exit(1) if (os.access('/etc/zulip/zulip.conf', os.R_OK) and not os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)): # The", "if \"--no-traceback\" not in sys.argv and len(sys.argv) > 1: sys.argv.append(\"--traceback\")", "and lack of access for /etc/zulip/zulip-secrets.conf (which # should be", "sys.path.append(BASE_DIR) import scripts.lib.setup_path_on_import if __name__ == 
\"__main__\": if 'posix' in", "settings.MANAGEMENT_LOG_PATH) os.environ.setdefault(\"PYTHONSTARTUP\", os.path.join(BASE_DIR, \"scripts/lib/pythonrc.py\")) if \"--no-traceback\" not in sys.argv and", "# The best way to detect running manage.py as another", "== 0: print(\"manage.py should not be run as root. Use", "root and zulip) print(\"Error accessing Zulip secrets; manage.py in production", "CommandError from scripts.lib.zulip_tools import log_management_command log_management_command(\" \".join(sys.argv), settings.MANAGEMENT_LOG_PATH) os.environ.setdefault(\"PYTHONSTARTUP\", os.path.join(BASE_DIR,", "that would require that # access is to check for", "access to /etc/zulip/zulip.conf (in # which case it's a production", "# should be only readable by root and zulip) print(\"Error", "os.path.join(BASE_DIR, \"scripts/lib/pythonrc.py\")) if \"--no-traceback\" not in sys.argv and len(sys.argv) >", "a dev environment) # and lack of access for /etc/zulip/zulip-secrets.conf", "importing anything that would require that # access is to", "> 1: sys.argv.append(\"--traceback\") try: execute_from_command_line(sys.argv) except CommandError as e: print(e,", "access is to check for access to /etc/zulip/zulip.conf (in #", "only readable by root and zulip) print(\"Error accessing Zulip secrets;", "os.geteuid() == 0: print(\"manage.py should not be run as root.", "drop root.\") sys.exit(1) if (os.access('/etc/zulip/zulip.conf', os.R_OK) and not os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)):", "in os.name and os.geteuid() == 0: print(\"manage.py should not be", "from django.core.management import execute_from_command_line from django.core.management.base import CommandError from scripts.lib.zulip_tools", "if 'posix' in os.name and os.geteuid() == 0: print(\"manage.py should", "== \"__main__\": if 'posix' in os.name and os.geteuid() == 0:", "django.core.management import execute_from_command_line from django.core.management.base import CommandError from scripts.lib.zulip_tools 
import", "= os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) import scripts.lib.setup_path_on_import if __name__ == \"__main__\": if", "not a dev environment) # and lack of access for", "dev environment) # and lack of access for /etc/zulip/zulip-secrets.conf (which", "(which # should be only readable by root and zulip)", "if (os.access('/etc/zulip/zulip.conf', os.R_OK) and not os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)): # The best", "settings from django.core.management import execute_from_command_line from django.core.management.base import CommandError from", "is to check for access to /etc/zulip/zulip.conf (in # which", "zulip` to drop root.\") sys.exit(1) if (os.access('/etc/zulip/zulip.conf', os.R_OK) and not", "environment) # and lack of access for /etc/zulip/zulip-secrets.conf (which #", "import execute_from_command_line from django.core.management.base import CommandError from scripts.lib.zulip_tools import log_management_command", "# and lack of access for /etc/zulip/zulip-secrets.conf (which # should", "\"__main__\": if 'posix' in os.name and os.geteuid() == 0: print(\"manage.py", "\"zproject.settings\") from django.conf import settings from django.core.management import execute_from_command_line from", "__name__ == \"__main__\": if 'posix' in os.name and os.geteuid() ==", "not be run as root. Use `su zulip` to drop", "#!/usr/bin/env python3 import os import sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR)", "and not os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)): # The best way to detect", "run as root. 
Use `su zulip` to drop root.\") sys.exit(1)", "scripts.lib.zulip_tools import log_management_command log_management_command(\" \".join(sys.argv), settings.MANAGEMENT_LOG_PATH) os.environ.setdefault(\"PYTHONSTARTUP\", os.path.join(BASE_DIR, \"scripts/lib/pythonrc.py\")) if", "\".join(sys.argv), settings.MANAGEMENT_LOG_PATH) os.environ.setdefault(\"PYTHONSTARTUP\", os.path.join(BASE_DIR, \"scripts/lib/pythonrc.py\")) if \"--no-traceback\" not in sys.argv", "be only readable by root and zulip) print(\"Error accessing Zulip", "to check for access to /etc/zulip/zulip.conf (in # which case", "secrets; manage.py in production must be run as the zulip", "run as the zulip user.\") sys.exit(1) os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"zproject.settings\") from django.conf", "/etc/zulip/zulip-secrets.conf (which # should be only readable by root and", "import sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) import scripts.lib.setup_path_on_import if __name__", "Zulip secrets; manage.py in production must be run as the", "it's a production server, not a dev environment) # and", "sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) import scripts.lib.setup_path_on_import if __name__ ==", "if __name__ == \"__main__\": if 'posix' in os.name and os.geteuid()", "access for /etc/zulip/zulip-secrets.conf (which # should be only readable by", "require that # access is to check for access to", "running manage.py as another user in # production before importing", "# production before importing anything that would require that #", "log_management_command(\" \".join(sys.argv), settings.MANAGEMENT_LOG_PATH) os.environ.setdefault(\"PYTHONSTARTUP\", os.path.join(BASE_DIR, \"scripts/lib/pythonrc.py\")) if \"--no-traceback\" not in", "way to detect running manage.py as another user in #", "in # production before importing anything that would require that", "should not be run as root. 
Use `su zulip` to", "`su zulip` to drop root.\") sys.exit(1) if (os.access('/etc/zulip/zulip.conf', os.R_OK) and", "production must be run as the zulip user.\") sys.exit(1) os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\",", "The best way to detect running manage.py as another user", "should be only readable by root and zulip) print(\"Error accessing", "zulip user.\") sys.exit(1) os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"zproject.settings\") from django.conf import settings from", "manage.py as another user in # production before importing anything", "os.R_OK) and not os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)): # The best way to", "from scripts.lib.zulip_tools import log_management_command log_management_command(\" \".join(sys.argv), settings.MANAGEMENT_LOG_PATH) os.environ.setdefault(\"PYTHONSTARTUP\", os.path.join(BASE_DIR, \"scripts/lib/pythonrc.py\"))", "os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"zproject.settings\") from django.conf import settings from django.core.management import execute_from_command_line", "to /etc/zulip/zulip.conf (in # which case it's a production server,", "os.environ.setdefault(\"PYTHONSTARTUP\", os.path.join(BASE_DIR, \"scripts/lib/pythonrc.py\")) if \"--no-traceback\" not in sys.argv and len(sys.argv)", "user.\") sys.exit(1) os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"zproject.settings\") from django.conf import settings from django.core.management", "0: print(\"manage.py should not be run as root. 
Use `su", "import CommandError from scripts.lib.zulip_tools import log_management_command log_management_command(\" \".join(sys.argv), settings.MANAGEMENT_LOG_PATH) os.environ.setdefault(\"PYTHONSTARTUP\",", "Use `su zulip` to drop root.\") sys.exit(1) if (os.access('/etc/zulip/zulip.conf', os.R_OK)", "os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)): # The best way to detect running manage.py", "case it's a production server, not a dev environment) #", "for /etc/zulip/zulip-secrets.conf (which # should be only readable by root", "anything that would require that # access is to check", "sys.exit(1) os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"zproject.settings\") from django.conf import settings from django.core.management import", "root.\") sys.exit(1) if (os.access('/etc/zulip/zulip.conf', os.R_OK) and not os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)): #", "user in # production before importing anything that would require", "in production must be run as the zulip user.\") sys.exit(1)", "of access for /etc/zulip/zulip-secrets.conf (which # should be only readable", "import settings from django.core.management import execute_from_command_line from django.core.management.base import CommandError", "scripts.lib.setup_path_on_import if __name__ == \"__main__\": if 'posix' in os.name and", "be run as root. Use `su zulip` to drop root.\")", "manage.py in production must be run as the zulip user.\")", "1: sys.argv.append(\"--traceback\") try: execute_from_command_line(sys.argv) except CommandError as e: print(e, file=sys.stderr)", "print(\"Error accessing Zulip secrets; manage.py in production must be run", "to detect running manage.py as another user in # production", "and os.geteuid() == 0: print(\"manage.py should not be run as", "detect running manage.py as another user in # production before", "from django.conf import settings from django.core.management import execute_from_command_line from django.core.management.base", "as root. 
Use `su zulip` to drop root.\") sys.exit(1) if", "os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) import scripts.lib.setup_path_on_import if __name__ == \"__main__\": if 'posix'", "sys.argv and len(sys.argv) > 1: sys.argv.append(\"--traceback\") try: execute_from_command_line(sys.argv) except CommandError", "os.name and os.geteuid() == 0: print(\"manage.py should not be run", "a production server, not a dev environment) # and lack", "to drop root.\") sys.exit(1) if (os.access('/etc/zulip/zulip.conf', os.R_OK) and not os.access('/etc/zulip/zulip-secrets.conf',", "production before importing anything that would require that # access", "and zulip) print(\"Error accessing Zulip secrets; manage.py in production must", "# which case it's a production server, not a dev", "be run as the zulip user.\") sys.exit(1) os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"zproject.settings\") from", "python3 import os import sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) import", "'posix' in os.name and os.geteuid() == 0: print(\"manage.py should not", "by root and zulip) print(\"Error accessing Zulip secrets; manage.py in", "accessing Zulip secrets; manage.py in production must be run as", "which case it's a production server, not a dev environment)", "django.conf import settings from django.core.management import execute_from_command_line from django.core.management.base import", "log_management_command log_management_command(\" \".join(sys.argv), settings.MANAGEMENT_LOG_PATH) os.environ.setdefault(\"PYTHONSTARTUP\", os.path.join(BASE_DIR, \"scripts/lib/pythonrc.py\")) if \"--no-traceback\" not", "\"--no-traceback\" not in sys.argv and len(sys.argv) > 1: sys.argv.append(\"--traceback\") try:", "check for access to /etc/zulip/zulip.conf (in # which case it's", "not in sys.argv and len(sys.argv) > 1: sys.argv.append(\"--traceback\") try: execute_from_command_line(sys.argv)", "as another user in # production before importing 
anything that", "must be run as the zulip user.\") sys.exit(1) os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"zproject.settings\")", "(os.access('/etc/zulip/zulip.conf', os.R_OK) and not os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)): # The best way", "and len(sys.argv) > 1: sys.argv.append(\"--traceback\") try: execute_from_command_line(sys.argv) except CommandError as", "server, not a dev environment) # and lack of access", "readable by root and zulip) print(\"Error accessing Zulip secrets; manage.py", "from django.core.management.base import CommandError from scripts.lib.zulip_tools import log_management_command log_management_command(\" \".join(sys.argv),", "print(\"manage.py should not be run as root. Use `su zulip`", "the zulip user.\") sys.exit(1) os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"zproject.settings\") from django.conf import settings", "not os.access('/etc/zulip/zulip-secrets.conf', os.R_OK)): # The best way to detect running", "best way to detect running manage.py as another user in", "(in # which case it's a production server, not a", "for access to /etc/zulip/zulip.conf (in # which case it's a", "execute_from_command_line from django.core.management.base import CommandError from scripts.lib.zulip_tools import log_management_command log_management_command(\"", "import log_management_command log_management_command(\" \".join(sys.argv), settings.MANAGEMENT_LOG_PATH) os.environ.setdefault(\"PYTHONSTARTUP\", os.path.join(BASE_DIR, \"scripts/lib/pythonrc.py\")) if \"--no-traceback\"", "import os import sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) import scripts.lib.setup_path_on_import", "BASE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) import scripts.lib.setup_path_on_import if __name__ == \"__main__\":", "production server, not a dev environment) # and lack of", "would require that # access is to check for access", "# access is to check for access to 
/etc/zulip/zulip.conf (in" ]
[ "= r.text.replace('\\n', '') # Return the data as a BeautifulSoup", "td['axis'].split('|') # Conditional instructions have different durations depending on how", "int(specs[2]) opcodes.append({ 'opcode': code, 'mnemonics': normalize(td.text).strip(), 'size': int(specs[1]), 'cycles': cycles,", "BeautifulSoup object for easy querying return BeautifulSoup(text, 'html.parser') def table_title(table):", "None # remove newlines text = r.text.replace('\\n', '') # Return", "are stored in an array. Otherwise, the # duration is", "page is not None: opcodes = parse_tables(page) with open('opcodes.json', 'w')", "is just stored as a single value. cycles = list(map(int,", "'ret', 'call'] and operand in ['c', 'nc', 'z', 'nz', 'po',", "}) print(' {}: {}'.format(hex(code), td.text)) return opcodes def normalize(mnemonics): parts", "condition if instr_name in ['jr', 'jp', 'ret', 'call'] and operand", "operand) # Alt registers elif operand == 'af\\'': operand =", "'p', 'm']: operand = 'f_' + { 'po': 'np', 'pe':", "# Flag condition if instr_name in ['jr', 'jp', 'ret', 'call']", "if instr_name in ['jr', 'jp', 'ret', 'call'] and operand in", "import json, requests from bs4 import BeautifulSoup def fetch(): r", "This scripts fetches the contents of a webpage that contains", "= requests.get('http://clrhome.org/table/') if not r.ok: print('Cannot fetch {})'.format(r.url)) return None", "4 | lo specs = td['axis'].split('|') # Conditional instructions have", "not r.ok: print('Cannot fetch {})'.format(r.url)) return None # remove newlines", "fetches the contents of a webpage that contains nicely formatted", "as a BeautifulSoup object for easy querying return BeautifulSoup(text, 'html.parser')", "Conditional instructions have different durations depending on how they #", "'m': 's' }.get(operand, operand) # Alt registers elif operand ==", "object for easy querying return BeautifulSoup(text, 'html.parser') def table_title(table): return", "16) # row lo = td.parent.index(td) - 1 # column", 
"return '{} {}'.format(name, ','.join(normalize_operand(o, name) for o in operands)) def", "'' else table['title'].lower() def parse_tables(page): return {table_title(table): parse_table(table) for table", "'description': specs[3] }) print(' {}: {}'.format(hex(code), td.text)) return opcodes def", "if __name__ == '__main__': \"\"\" This scripts fetches the contents", "return None # remove newlines text = r.text.replace('\\n', '') #", "and outputs it to JSON. \"\"\" page = fetch() if", "<filename>core/scripts/fetch_instructions_specs.py<gh_stars>0 import json, requests from bs4 import BeautifulSoup def fetch():", "in table.find_all('td', axis=True): hi = int(td.parent.find('th').text, 16) # row lo", "json, requests from bs4 import BeautifulSoup def fetch(): r =", "== '' else table['title'].lower() def parse_tables(page): return {table_title(table): parse_table(table) for", "{}'.format(hex(code), td.text)) return opcodes def normalize(mnemonics): parts = mnemonics.split(' ')", "return {table_title(table): parse_table(table) for table in page.find_all('table')} def parse_table(table): print('Table", "\"\"\" page = fetch() if page is not None: opcodes", "r = requests.get('http://clrhome.org/table/') if not r.ok: print('Cannot fetch {})'.format(r.url)) return", "hi = int(td.parent.find('th').text, 16) # row lo = td.parent.index(td) -", "# Conditional instructions have different durations depending on how they", "value. 
cycles = list(map(int, specs[2].split('/'))) if '/' in specs[2] else", "data about the Z80 opcodes and outputs it to JSON.", "specs[3] }) print(' {}: {}'.format(hex(code), td.text)) return opcodes def normalize(mnemonics):", "'main' if table['title'] == '' else table['title'].lower() def parse_tables(page): return", "parts[1].split(',') if len(parts) > 1 else [] return '{} {}'.format(name,", "= fetch() if page is not None: opcodes = parse_tables(page)", "def parse_table(table): print('Table {}'.format(table_title(table))) opcodes = [] for td in", "else [] return '{} {}'.format(name, ','.join(normalize_operand(o, name) for o in", "# Return the data as a BeautifulSoup object for easy", "specs = td['axis'].split('|') # Conditional instructions have different durations depending", "instructions have different durations depending on how they # branch", "so the possible durations are stored in an array. Otherwise,", "normalize_operand(operand, instr_name): # Flag condition if instr_name in ['jr', 'jp',", "['jr', 'jp', 'ret', 'call'] and operand in ['c', 'nc', 'z',", "'call'] and operand in ['c', 'nc', 'z', 'nz', 'po', 'pe',", "'f_' + { 'po': 'np', 'pe': 'p', 'p': 'ns', 'm':", "\"\"\" This scripts fetches the contents of a webpage that", "'jp', 'ret', 'call'] and operand in ['c', 'nc', 'z', 'nz',", "about the Z80 opcodes and outputs it to JSON. \"\"\"", "operand = 'af_' return operand if __name__ == '__main__': \"\"\"", "'z', 'nz', 'po', 'pe', 'p', 'm']: operand = 'f_' +", "in specs[2] else int(specs[2]) opcodes.append({ 'opcode': code, 'mnemonics': normalize(td.text).strip(), 'size':", "if table['title'] == '' else table['title'].lower() def parse_tables(page): return {table_title(table):", "for easy querying return BeautifulSoup(text, 'html.parser') def table_title(table): return 'main'", "operand in ['c', 'nc', 'z', 'nz', 'po', 'pe', 'p', 'm']:", "def normalize(mnemonics): parts = mnemonics.split(' ') name = parts[0] operands", "Z80 opcodes and outputs it to JSON. 
\"\"\" page =", "= mnemonics.split(' ') name = parts[0] operands = parts[1].split(',') if", "td in table.find_all('td', axis=True): hi = int(td.parent.find('th').text, 16) # row", "is not None: opcodes = parse_tables(page) with open('opcodes.json', 'w') as", "print('Table {}'.format(table_title(table))) opcodes = [] for td in table.find_all('td', axis=True):", "the contents of a webpage that contains nicely formatted data", "BeautifulSoup def fetch(): r = requests.get('http://clrhome.org/table/') if not r.ok: print('Cannot", "instr_name in ['jr', 'jp', 'ret', 'call'] and operand in ['c',", "['c', 'nc', 'z', 'nz', 'po', 'pe', 'p', 'm']: operand =", "len(parts) > 1 else [] return '{} {}'.format(name, ','.join(normalize_operand(o, name)", "have different durations depending on how they # branch so", "stored as a single value. cycles = list(map(int, specs[2].split('/'))) if", "parse_table(table): print('Table {}'.format(table_title(table))) opcodes = [] for td in table.find_all('td',", "= int(td.parent.find('th').text, 16) # row lo = td.parent.index(td) - 1", "td.parent.index(td) - 1 # column code = hi << 4", "just stored as a single value. cycles = list(map(int, specs[2].split('/')))", "opcodes.append({ 'opcode': code, 'mnemonics': normalize(td.text).strip(), 'size': int(specs[1]), 'cycles': cycles, 'flags':", "1 else [] return '{} {}'.format(name, ','.join(normalize_operand(o, name) for o", "name) for o in operands)) def normalize_operand(operand, instr_name): # Flag", "# Alt registers elif operand == 'af\\'': operand = 'af_'", "for o in operands)) def normalize_operand(operand, instr_name): # Flag condition", "of a webpage that contains nicely formatted data about the", "contains nicely formatted data about the Z80 opcodes and outputs", "hi << 4 | lo specs = td['axis'].split('|') # Conditional", "<< 4 | lo specs = td['axis'].split('|') # Conditional instructions", "# duration is just stored as a single value. cycles", "the Z80 opcodes and outputs it to JSON. 
\"\"\" page", "name = parts[0] operands = parts[1].split(',') if len(parts) > 1", "requests from bs4 import BeautifulSoup def fetch(): r = requests.get('http://clrhome.org/table/')", "code, 'mnemonics': normalize(td.text).strip(), 'size': int(specs[1]), 'cycles': cycles, 'flags': specs[0], 'description':", "import BeautifulSoup def fetch(): r = requests.get('http://clrhome.org/table/') if not r.ok:", "branch so the possible durations are stored in an array.", "'ns', 'm': 's' }.get(operand, operand) # Alt registers elif operand", "[] for td in table.find_all('td', axis=True): hi = int(td.parent.find('th').text, 16)", "for td in table.find_all('td', axis=True): hi = int(td.parent.find('th').text, 16) #", "[] return '{} {}'.format(name, ','.join(normalize_operand(o, name) for o in operands))", "in ['c', 'nc', 'z', 'nz', 'po', 'pe', 'p', 'm']: operand", "to JSON. \"\"\" page = fetch() if page is not", "specs[2] else int(specs[2]) opcodes.append({ 'opcode': code, 'mnemonics': normalize(td.text).strip(), 'size': int(specs[1]),", "return opcodes def normalize(mnemonics): parts = mnemonics.split(' ') name =", "parse_table(table) for table in page.find_all('table')} def parse_table(table): print('Table {}'.format(table_title(table))) opcodes", "stored in an array. 
Otherwise, the # duration is just", "operand = 'f_' + { 'po': 'np', 'pe': 'p', 'p':", "lo = td.parent.index(td) - 1 # column code = hi", "elif operand == 'af\\'': operand = 'af_' return operand if", "'cycles': cycles, 'flags': specs[0], 'description': specs[3] }) print(' {}: {}'.format(hex(code),", "cycles, 'flags': specs[0], 'description': specs[3] }) print(' {}: {}'.format(hex(code), td.text))", "lo specs = td['axis'].split('|') # Conditional instructions have different durations", "Alt registers elif operand == 'af\\'': operand = 'af_' return", "a webpage that contains nicely formatted data about the Z80", "fetch() if page is not None: opcodes = parse_tables(page) with", "= [] for td in table.find_all('td', axis=True): hi = int(td.parent.find('th').text,", "# column code = hi << 4 | lo specs", "BeautifulSoup(text, 'html.parser') def table_title(table): return 'main' if table['title'] == ''", "def parse_tables(page): return {table_title(table): parse_table(table) for table in page.find_all('table')} def", "outputs it to JSON. 
\"\"\" page = fetch() if page", "int(specs[1]), 'cycles': cycles, 'flags': specs[0], 'description': specs[3] }) print(' {}:", "= parse_tables(page) with open('opcodes.json', 'w') as output: json.dump(opcodes, output, indent=2)", "bs4 import BeautifulSoup def fetch(): r = requests.get('http://clrhome.org/table/') if not", "o in operands)) def normalize_operand(operand, instr_name): # Flag condition if", "> 1 else [] return '{} {}'.format(name, ','.join(normalize_operand(o, name) for", "'po', 'pe', 'p', 'm']: operand = 'f_' + { 'po':", "else table['title'].lower() def parse_tables(page): return {table_title(table): parse_table(table) for table in", "parse_tables(page): return {table_title(table): parse_table(table) for table in page.find_all('table')} def parse_table(table):", "if '/' in specs[2] else int(specs[2]) opcodes.append({ 'opcode': code, 'mnemonics':", "| lo specs = td['axis'].split('|') # Conditional instructions have different", "depending on how they # branch so the possible durations", "') name = parts[0] operands = parts[1].split(',') if len(parts) >", "__name__ == '__main__': \"\"\" This scripts fetches the contents of", "{}'.format(table_title(table))) opcodes = [] for td in table.find_all('td', axis=True): hi", "axis=True): hi = int(td.parent.find('th').text, 16) # row lo = td.parent.index(td)", "= list(map(int, specs[2].split('/'))) if '/' in specs[2] else int(specs[2]) opcodes.append({", "# branch so the possible durations are stored in an", "'p': 'ns', 'm': 's' }.get(operand, operand) # Alt registers elif", "column code = hi << 4 | lo specs =", "specs[2].split('/'))) if '/' in specs[2] else int(specs[2]) opcodes.append({ 'opcode': code,", "how they # branch so the possible durations are stored", "possible durations are stored in an array. Otherwise, the #", "and operand in ['c', 'nc', 'z', 'nz', 'po', 'pe', 'p',", "duration is just stored as a single value. 
cycles =", "that contains nicely formatted data about the Z80 opcodes and", "durations are stored in an array. Otherwise, the # duration", "if len(parts) > 1 else [] return '{} {}'.format(name, ','.join(normalize_operand(o,", "'af_' return operand if __name__ == '__main__': \"\"\" This scripts", "in an array. Otherwise, the # duration is just stored", "for table in page.find_all('table')} def parse_table(table): print('Table {}'.format(table_title(table))) opcodes =", "table in page.find_all('table')} def parse_table(table): print('Table {}'.format(table_title(table))) opcodes = []", "list(map(int, specs[2].split('/'))) if '/' in specs[2] else int(specs[2]) opcodes.append({ 'opcode':", "in ['jr', 'jp', 'ret', 'call'] and operand in ['c', 'nc',", "fetch(): r = requests.get('http://clrhome.org/table/') if not r.ok: print('Cannot fetch {})'.format(r.url))", "'html.parser') def table_title(table): return 'main' if table['title'] == '' else", "different durations depending on how they # branch so the", "{})'.format(r.url)) return None # remove newlines text = r.text.replace('\\n', '')", "# remove newlines text = r.text.replace('\\n', '') # Return the", "int(td.parent.find('th').text, 16) # row lo = td.parent.index(td) - 1 #", "'af\\'': operand = 'af_' return operand if __name__ == '__main__':", "requests.get('http://clrhome.org/table/') if not r.ok: print('Cannot fetch {})'.format(r.url)) return None #", "'po': 'np', 'pe': 'p', 'p': 'ns', 'm': 's' }.get(operand, operand)", "'nz', 'po', 'pe', 'p', 'm']: operand = 'f_' + {", "parts = mnemonics.split(' ') name = parts[0] operands = parts[1].split(',')", "Flag condition if instr_name in ['jr', 'jp', 'ret', 'call'] and", "','.join(normalize_operand(o, name) for o in operands)) def normalize_operand(operand, instr_name): #", "def fetch(): r = requests.get('http://clrhome.org/table/') if not r.ok: print('Cannot fetch", "'/' in specs[2] else int(specs[2]) opcodes.append({ 'opcode': code, 'mnemonics': 
normalize(td.text).strip(),", "{table_title(table): parse_table(table) for table in page.find_all('table')} def parse_table(table): print('Table {}'.format(table_title(table)))", "opcodes and outputs it to JSON. \"\"\" page = fetch()", "code = hi << 4 | lo specs = td['axis'].split('|')", "it to JSON. \"\"\" page = fetch() if page is", "in page.find_all('table')} def parse_table(table): print('Table {}'.format(table_title(table))) opcodes = [] for", "row lo = td.parent.index(td) - 1 # column code =", "not None: opcodes = parse_tables(page) with open('opcodes.json', 'w') as output:", "r.text.replace('\\n', '') # Return the data as a BeautifulSoup object", "the possible durations are stored in an array. Otherwise, the", "data as a BeautifulSoup object for easy querying return BeautifulSoup(text,", "'size': int(specs[1]), 'cycles': cycles, 'flags': specs[0], 'description': specs[3] }) print('", "text = r.text.replace('\\n', '') # Return the data as a", "operands)) def normalize_operand(operand, instr_name): # Flag condition if instr_name in", "== 'af\\'': operand = 'af_' return operand if __name__ ==", "Otherwise, the # duration is just stored as a single", "operand if __name__ == '__main__': \"\"\" This scripts fetches the", "nicely formatted data about the Z80 opcodes and outputs it", "durations depending on how they # branch so the possible", "formatted data about the Z80 opcodes and outputs it to", "def table_title(table): return 'main' if table['title'] == '' else table['title'].lower()", "table_title(table): return 'main' if table['title'] == '' else table['title'].lower() def", "= parts[0] operands = parts[1].split(',') if len(parts) > 1 else", "{}'.format(name, ','.join(normalize_operand(o, name) for o in operands)) def normalize_operand(operand, instr_name):", "'pe': 'p', 'p': 'ns', 'm': 's' }.get(operand, operand) # Alt", "'mnemonics': normalize(td.text).strip(), 'size': int(specs[1]), 'cycles': cycles, 'flags': specs[0], 'description': specs[3]", "contents 
of a webpage that contains nicely formatted data about", "= hi << 4 | lo specs = td['axis'].split('|') #", "'__main__': \"\"\" This scripts fetches the contents of a webpage", "None: opcodes = parse_tables(page) with open('opcodes.json', 'w') as output: json.dump(opcodes,", "'p', 'p': 'ns', 'm': 's' }.get(operand, operand) # Alt registers", "page.find_all('table')} def parse_table(table): print('Table {}'.format(table_title(table))) opcodes = [] for td", "remove newlines text = r.text.replace('\\n', '') # Return the data", "td.text)) return opcodes def normalize(mnemonics): parts = mnemonics.split(' ') name", "'pe', 'p', 'm']: operand = 'f_' + { 'po': 'np',", "fetch {})'.format(r.url)) return None # remove newlines text = r.text.replace('\\n',", "'m']: operand = 'f_' + { 'po': 'np', 'pe': 'p',", "'nc', 'z', 'nz', 'po', 'pe', 'p', 'm']: operand = 'f_'", "= td.parent.index(td) - 1 # column code = hi <<", "{ 'po': 'np', 'pe': 'p', 'p': 'ns', 'm': 's' }.get(operand,", "return operand if __name__ == '__main__': \"\"\" This scripts fetches", "'np', 'pe': 'p', 'p': 'ns', 'm': 's' }.get(operand, operand) #", "if page is not None: opcodes = parse_tables(page) with open('opcodes.json',", "querying return BeautifulSoup(text, 'html.parser') def table_title(table): return 'main' if table['title']", "'opcode': code, 'mnemonics': normalize(td.text).strip(), 'size': int(specs[1]), 'cycles': cycles, 'flags': specs[0],", "r.ok: print('Cannot fetch {})'.format(r.url)) return None # remove newlines text", "normalize(td.text).strip(), 'size': int(specs[1]), 'cycles': cycles, 'flags': specs[0], 'description': specs[3] })", "the # duration is just stored as a single value.", "opcodes def normalize(mnemonics): parts = mnemonics.split(' ') name = parts[0]", "parts[0] operands = parts[1].split(',') if len(parts) > 1 else []", "scripts fetches the contents of a webpage that contains nicely", "'{} {}'.format(name, ','.join(normalize_operand(o, name) for o in operands)) def 
normalize_operand(operand,", "table['title'] == '' else table['title'].lower() def parse_tables(page): return {table_title(table): parse_table(table)", "operands = parts[1].split(',') if len(parts) > 1 else [] return", "on how they # branch so the possible durations are", "single value. cycles = list(map(int, specs[2].split('/'))) if '/' in specs[2]", "'flags': specs[0], 'description': specs[3] }) print(' {}: {}'.format(hex(code), td.text)) return", "opcodes = [] for td in table.find_all('td', axis=True): hi =", "mnemonics.split(' ') name = parts[0] operands = parts[1].split(',') if len(parts)", "'s' }.get(operand, operand) # Alt registers elif operand == 'af\\'':", "page = fetch() if page is not None: opcodes =", "from bs4 import BeautifulSoup def fetch(): r = requests.get('http://clrhome.org/table/') if", "they # branch so the possible durations are stored in", "table['title'].lower() def parse_tables(page): return {table_title(table): parse_table(table) for table in page.find_all('table')}", "registers elif operand == 'af\\'': operand = 'af_' return operand", "operand == 'af\\'': operand = 'af_' return operand if __name__", "= 'af_' return operand if __name__ == '__main__': \"\"\" This", "easy querying return BeautifulSoup(text, 'html.parser') def table_title(table): return 'main' if", "normalize(mnemonics): parts = mnemonics.split(' ') name = parts[0] operands =", "a BeautifulSoup object for easy querying return BeautifulSoup(text, 'html.parser') def", "opcodes = parse_tables(page) with open('opcodes.json', 'w') as output: json.dump(opcodes, output,", "= td['axis'].split('|') # Conditional instructions have different durations depending on", "= parts[1].split(',') if len(parts) > 1 else [] return '{}", "specs[0], 'description': specs[3] }) print(' {}: {}'.format(hex(code), td.text)) return opcodes", "+ { 'po': 'np', 'pe': 'p', 'p': 'ns', 'm': 's'", "newlines text = r.text.replace('\\n', '') # Return the data as", "'') # Return the data as a BeautifulSoup object 
for", "1 # column code = hi << 4 | lo", "return BeautifulSoup(text, 'html.parser') def table_title(table): return 'main' if table['title'] ==", "if not r.ok: print('Cannot fetch {})'.format(r.url)) return None # remove", "== '__main__': \"\"\" This scripts fetches the contents of a", "}.get(operand, operand) # Alt registers elif operand == 'af\\'': operand", "instr_name): # Flag condition if instr_name in ['jr', 'jp', 'ret',", "a single value. cycles = list(map(int, specs[2].split('/'))) if '/' in", "= 'f_' + { 'po': 'np', 'pe': 'p', 'p': 'ns',", "an array. Otherwise, the # duration is just stored as", "- 1 # column code = hi << 4 |", "Return the data as a BeautifulSoup object for easy querying", "webpage that contains nicely formatted data about the Z80 opcodes", "the data as a BeautifulSoup object for easy querying return", "in operands)) def normalize_operand(operand, instr_name): # Flag condition if instr_name", "print(' {}: {}'.format(hex(code), td.text)) return opcodes def normalize(mnemonics): parts =", "JSON. \"\"\" page = fetch() if page is not None:", "return 'main' if table['title'] == '' else table['title'].lower() def parse_tables(page):", "array. Otherwise, the # duration is just stored as a", "print('Cannot fetch {})'.format(r.url)) return None # remove newlines text =", "table.find_all('td', axis=True): hi = int(td.parent.find('th').text, 16) # row lo =", "as a single value. cycles = list(map(int, specs[2].split('/'))) if '/'", "cycles = list(map(int, specs[2].split('/'))) if '/' in specs[2] else int(specs[2])", "# row lo = td.parent.index(td) - 1 # column code", "{}: {}'.format(hex(code), td.text)) return opcodes def normalize(mnemonics): parts = mnemonics.split('", "def normalize_operand(operand, instr_name): # Flag condition if instr_name in ['jr',", "else int(specs[2]) opcodes.append({ 'opcode': code, 'mnemonics': normalize(td.text).strip(), 'size': int(specs[1]), 'cycles':" ]
[ "self.assertRaises(ValueError): testfile = open( \"unittests/scans/intsights/intsights_invalid_file.txt\") parser = IntSightsParser() findings =", "Test from dojo.tools.intsights.parser import IntSightsParser class TestIntSightsParser(DojoTestCase): def test_intsights_parser_with_one_critical_vuln_has_one_findings_json( self):", "self.assertEqual( '5c80dbf83b4a3900078b6be6', finding.unique_id_from_tool) self.assertEqual( 'HTTP headers weakness in initech.com web", "class TestIntSightsParser(DojoTestCase): def test_intsights_parser_with_one_critical_vuln_has_one_findings_json( self): testfile = open(\"unittests/scans/intsights/intsights_one_vul.json\") parser =", "\"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\", finding.references) def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv( self): testfile = open(\"unittests/scans/intsights/intsights_one_vuln.csv\") parser =", "testfile.close() self.assertEqual(1, len(findings)) finding = list(findings)[0] self.assertEqual( '5c80dbf83b4a3900078b6be6', finding.unique_id_from_tool) self.assertEqual(", "headers weakness in company-domain.com web server\", finding.title) def test_intsights_parser_with_many_vuln_has_many_findings_json(self): testfile", "TestIntSightsParser(DojoTestCase): def test_intsights_parser_with_one_critical_vuln_has_one_findings_json( self): testfile = open(\"unittests/scans/intsights/intsights_one_vul.json\") parser = IntSightsParser()", "test_intsights_parser_invalid_text_with_error_csv(self): with self.assertRaises(ValueError): testfile = open( \"unittests/scans/intsights/intsights_invalid_file.txt\") parser = IntSightsParser()", "testfile = open(\"unittests/scans/intsights/intsights_one_vuln.csv\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test())", "open(\"unittests/scans/intsights/intsights_many_vuln.csv\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() 
self.assertEqual(9,", "IntSightsParser class TestIntSightsParser(DojoTestCase): def test_intsights_parser_with_one_critical_vuln_has_one_findings_json( self): testfile = open(\"unittests/scans/intsights/intsights_one_vul.json\") parser", "self.assertEqual(1, len(findings)) finding = list(findings)[0] self.assertEqual( \"mn7xy83finmmth4ja363rci9\", finding.unique_id_from_tool) self.assertEqual( \"HTTP", "= IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(9, len(findings)) def", "testfile.close() self.assertEqual(9, len(findings)) def test_intsights_parser_invalid_text_with_error_csv(self): with self.assertRaises(ValueError): testfile = open(", "list(findings)[0] self.assertEqual( \"mn7xy83finmmth4ja363rci9\", finding.unique_id_from_tool) self.assertEqual( \"HTTP headers weakness in company-domain.com", "DojoTestCase from dojo.models import Test from dojo.tools.intsights.parser import IntSightsParser class", "\"HTTP headers weakness in company-domain.com web server\", finding.title) def test_intsights_parser_with_many_vuln_has_many_findings_json(self):", "def test_intsights_parser_invalid_text_with_error_csv(self): with self.assertRaises(ValueError): testfile = open( \"unittests/scans/intsights/intsights_invalid_file.txt\") parser =", "= open(\"unittests/scans/intsights/intsights_one_vul.json\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close()", "finding = list(findings)[0] self.assertEqual( '5c80dbf83b4a3900078b6be6', finding.unique_id_from_tool) self.assertEqual( 'HTTP headers weakness", "Test()) testfile.close() self.assertEqual(3, len(findings)) def test_intsights_parser_with_many_vuln_has_many_findings_csv(self): testfile = open(\"unittests/scans/intsights/intsights_many_vuln.csv\") parser", "list(findings)[0] self.assertEqual( '5c80dbf83b4a3900078b6be6', finding.unique_id_from_tool) self.assertEqual( 'HTTP headers weakness in initech.com", "= list(findings)[0] 
self.assertEqual( '5c80dbf83b4a3900078b6be6', finding.unique_id_from_tool) self.assertEqual( 'HTTP headers weakness in", "in company-domain.com web server\", finding.title) def test_intsights_parser_with_many_vuln_has_many_findings_json(self): testfile = open(\"unittests/scans/intsights/intsights_many_vul.json\")", "def test_intsights_parser_with_one_critical_vuln_has_one_findings_json( self): testfile = open(\"unittests/scans/intsights/intsights_one_vul.json\") parser = IntSightsParser() findings", "server\", finding.title) def test_intsights_parser_with_many_vuln_has_many_findings_json(self): testfile = open(\"unittests/scans/intsights/intsights_many_vul.json\") parser = IntSightsParser()", "def test_intsights_parser_with_many_vuln_has_many_findings_csv(self): testfile = open(\"unittests/scans/intsights/intsights_many_vuln.csv\") parser = IntSightsParser() findings =", "len(findings)) finding = list(findings)[0] self.assertEqual( '5c80dbf83b4a3900078b6be6', finding.unique_id_from_tool) self.assertEqual( 'HTTP headers", "web server\", finding.title) def test_intsights_parser_with_many_vuln_has_many_findings_json(self): testfile = open(\"unittests/scans/intsights/intsights_many_vul.json\") parser =", "test_intsights_parser_with_one_critical_vuln_has_one_findings_csv( self): testfile = open(\"unittests/scans/intsights/intsights_one_vuln.csv\") parser = IntSightsParser() findings =", "parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1, len(findings)) finding = list(findings)[0] self.assertEqual( '5c80dbf83b4a3900078b6be6',", "company-domain.com web server\", finding.title) def test_intsights_parser_with_many_vuln_has_many_findings_json(self): testfile = open(\"unittests/scans/intsights/intsights_many_vul.json\") parser", "import DojoTestCase from dojo.models import Test from dojo.tools.intsights.parser import IntSightsParser", "= open( \"unittests/scans/intsights/intsights_invalid_file.txt\") parser = IntSightsParser() findings = 
parser.get_findings(testfile, Test())", "IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(3, len(findings)) def test_intsights_parser_with_many_vuln_has_many_findings_csv(self):", "Test()) testfile.close() self.assertEqual(1, len(findings)) finding = list(findings)[0] self.assertEqual( \"mn7xy83finmmth4ja363rci9\", finding.unique_id_from_tool)", "server', finding.title) self.assertEquals('Critical', finding.severity) self.assertEquals( \"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\", finding.references) def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv( self):", "'HTTP headers weakness in initech.com web server', finding.title) self.assertEquals('Critical', finding.severity)", "from dojo.models import Test from dojo.tools.intsights.parser import IntSightsParser class TestIntSightsParser(DojoTestCase):", "..dojo_test_case import DojoTestCase from dojo.models import Test from dojo.tools.intsights.parser import", "test_intsights_parser_with_many_vuln_has_many_findings_csv(self): testfile = open(\"unittests/scans/intsights/intsights_many_vuln.csv\") parser = IntSightsParser() findings = parser.get_findings(testfile,", "self.assertEquals( \"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\", finding.references) def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv( self): testfile = open(\"unittests/scans/intsights/intsights_one_vuln.csv\") parser", "open(\"unittests/scans/intsights/intsights_one_vul.json\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1,", "self.assertEqual( \"mn7xy83finmmth4ja363rci9\", finding.unique_id_from_tool) self.assertEqual( \"HTTP headers weakness in company-domain.com web", "findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1, len(findings)) finding = list(findings)[0]", 
"self.assertEqual(3, len(findings)) def test_intsights_parser_with_many_vuln_has_many_findings_csv(self): testfile = open(\"unittests/scans/intsights/intsights_many_vuln.csv\") parser = IntSightsParser()", "Test()) testfile.close() self.assertEqual(1, len(findings)) finding = list(findings)[0] self.assertEqual( '5c80dbf83b4a3900078b6be6', finding.unique_id_from_tool)", "= open(\"unittests/scans/intsights/intsights_many_vuln.csv\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close()", "len(findings)) def test_intsights_parser_with_many_vuln_has_many_findings_csv(self): testfile = open(\"unittests/scans/intsights/intsights_many_vuln.csv\") parser = IntSightsParser() findings", "'5c80dbf83b4a3900078b6be6', finding.unique_id_from_tool) self.assertEqual( 'HTTP headers weakness in initech.com web server',", "weakness in company-domain.com web server\", finding.title) def test_intsights_parser_with_many_vuln_has_many_findings_json(self): testfile =", "findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(9, len(findings)) def test_intsights_parser_invalid_text_with_error_csv(self): with", "self): testfile = open(\"unittests/scans/intsights/intsights_one_vuln.csv\") parser = IntSightsParser() findings = parser.get_findings(testfile,", "test_intsights_parser_with_many_vuln_has_many_findings_json(self): testfile = open(\"unittests/scans/intsights/intsights_many_vul.json\") parser = IntSightsParser() findings = parser.get_findings(testfile,", "self.assertEqual( \"HTTP headers weakness in company-domain.com web server\", finding.title) def", "testfile.close() self.assertEqual(1, len(findings)) finding = list(findings)[0] self.assertEqual( \"mn7xy83finmmth4ja363rci9\", finding.unique_id_from_tool) self.assertEqual(", "finding.severity) self.assertEquals( \"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\", finding.references) def 
test_intsights_parser_with_one_critical_vuln_has_one_findings_csv( self): testfile = open(\"unittests/scans/intsights/intsights_one_vuln.csv\")", "finding.title) def test_intsights_parser_with_many_vuln_has_many_findings_json(self): testfile = open(\"unittests/scans/intsights/intsights_many_vul.json\") parser = IntSightsParser() findings", "= open(\"unittests/scans/intsights/intsights_many_vul.json\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close()", "self.assertEqual(1, len(findings)) finding = list(findings)[0] self.assertEqual( '5c80dbf83b4a3900078b6be6', finding.unique_id_from_tool) self.assertEqual( 'HTTP", "parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1, len(findings)) finding = list(findings)[0] self.assertEqual( \"mn7xy83finmmth4ja363rci9\",", "len(findings)) finding = list(findings)[0] self.assertEqual( \"mn7xy83finmmth4ja363rci9\", finding.unique_id_from_tool) self.assertEqual( \"HTTP headers", "parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1, len(findings))", "self.assertEquals('Critical', finding.severity) self.assertEquals( \"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\", finding.references) def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv( self): testfile =", "\"mn7xy83finmmth4ja363rci9\", finding.unique_id_from_tool) self.assertEqual( \"HTTP headers weakness in company-domain.com web server\",", "testfile = open(\"unittests/scans/intsights/intsights_many_vuln.csv\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test())", "test_intsights_parser_with_one_critical_vuln_has_one_findings_json( self): testfile = open(\"unittests/scans/intsights/intsights_one_vul.json\") parser = IntSightsParser() findings =", "= parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1, len(findings)) finding = list(findings)[0] 
self.assertEqual(", "= IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1, len(findings)) finding", "initech.com web server', finding.title) self.assertEquals('Critical', finding.severity) self.assertEquals( \"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\", finding.references) def", "with self.assertRaises(ValueError): testfile = open( \"unittests/scans/intsights/intsights_invalid_file.txt\") parser = IntSightsParser() findings", "parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(3, len(findings))", "IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1, len(findings)) finding =", "len(findings)) def test_intsights_parser_invalid_text_with_error_csv(self): with self.assertRaises(ValueError): testfile = open( \"unittests/scans/intsights/intsights_invalid_file.txt\") parser", "= parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(9, len(findings)) def test_intsights_parser_invalid_text_with_error_csv(self): with self.assertRaises(ValueError):", "from ..dojo_test_case import DojoTestCase from dojo.models import Test from dojo.tools.intsights.parser", "weakness in initech.com web server', finding.title) self.assertEquals('Critical', finding.severity) self.assertEquals( \"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\",", "def test_intsights_parser_with_many_vuln_has_many_findings_json(self): testfile = open(\"unittests/scans/intsights/intsights_many_vul.json\") parser = IntSightsParser() findings =", "open(\"unittests/scans/intsights/intsights_one_vuln.csv\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(1,", "parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(9, len(findings)) def 
test_intsights_parser_invalid_text_with_error_csv(self): with self.assertRaises(ValueError): testfile", "finding = list(findings)[0] self.assertEqual( \"mn7xy83finmmth4ja363rci9\", finding.unique_id_from_tool) self.assertEqual( \"HTTP headers weakness", "= list(findings)[0] self.assertEqual( \"mn7xy83finmmth4ja363rci9\", finding.unique_id_from_tool) self.assertEqual( \"HTTP headers weakness in", "self): testfile = open(\"unittests/scans/intsights/intsights_one_vul.json\") parser = IntSightsParser() findings = parser.get_findings(testfile,", "Test()) testfile.close() self.assertEqual(9, len(findings)) def test_intsights_parser_invalid_text_with_error_csv(self): with self.assertRaises(ValueError): testfile =", "findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(3, len(findings)) def test_intsights_parser_with_many_vuln_has_many_findings_csv(self): testfile", "testfile.close() self.assertEqual(3, len(findings)) def test_intsights_parser_with_many_vuln_has_many_findings_csv(self): testfile = open(\"unittests/scans/intsights/intsights_many_vuln.csv\") parser =", "IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(9, len(findings)) def test_intsights_parser_invalid_text_with_error_csv(self):", "headers weakness in initech.com web server', finding.title) self.assertEquals('Critical', finding.severity) self.assertEquals(", "import Test from dojo.tools.intsights.parser import IntSightsParser class TestIntSightsParser(DojoTestCase): def test_intsights_parser_with_one_critical_vuln_has_one_findings_json(", "finding.title) self.assertEquals('Critical', finding.severity) self.assertEquals( \"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\", finding.references) def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv( self): testfile", "= open(\"unittests/scans/intsights/intsights_one_vuln.csv\") parser = IntSightsParser() findings = 
parser.get_findings(testfile, Test()) testfile.close()", "= parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(3, len(findings)) def test_intsights_parser_with_many_vuln_has_many_findings_csv(self): testfile =", "testfile = open(\"unittests/scans/intsights/intsights_many_vul.json\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test())", "testfile = open( \"unittests/scans/intsights/intsights_invalid_file.txt\") parser = IntSightsParser() findings = parser.get_findings(testfile,", "finding.unique_id_from_tool) self.assertEqual( 'HTTP headers weakness in initech.com web server', finding.title)", "import IntSightsParser class TestIntSightsParser(DojoTestCase): def test_intsights_parser_with_one_critical_vuln_has_one_findings_json( self): testfile = open(\"unittests/scans/intsights/intsights_one_vul.json\")", "parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(9, len(findings))", "parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(3, len(findings)) def test_intsights_parser_with_many_vuln_has_many_findings_csv(self): testfile = open(\"unittests/scans/intsights/intsights_many_vuln.csv\")", "= IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(3, len(findings)) def", "from dojo.tools.intsights.parser import IntSightsParser class TestIntSightsParser(DojoTestCase): def test_intsights_parser_with_one_critical_vuln_has_one_findings_json( self): testfile", "open(\"unittests/scans/intsights/intsights_many_vul.json\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(3,", "dojo.models import Test from dojo.tools.intsights.parser import IntSightsParser class TestIntSightsParser(DojoTestCase): def", "dojo.tools.intsights.parser import IntSightsParser class TestIntSightsParser(DojoTestCase): def 
test_intsights_parser_with_one_critical_vuln_has_one_findings_json( self): testfile =", "finding.unique_id_from_tool) self.assertEqual( \"HTTP headers weakness in company-domain.com web server\", finding.title)", "finding.references) def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv( self): testfile = open(\"unittests/scans/intsights/intsights_one_vuln.csv\") parser = IntSightsParser()", "self.assertEqual( 'HTTP headers weakness in initech.com web server', finding.title) self.assertEquals('Critical',", "testfile = open(\"unittests/scans/intsights/intsights_one_vul.json\") parser = IntSightsParser() findings = parser.get_findings(testfile, Test())", "self.assertEqual(9, len(findings)) def test_intsights_parser_invalid_text_with_error_csv(self): with self.assertRaises(ValueError): testfile = open( \"unittests/scans/intsights/intsights_invalid_file.txt\")", "in initech.com web server', finding.title) self.assertEquals('Critical', finding.severity) self.assertEquals( \"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\", finding.references)", "def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv( self): testfile = open(\"unittests/scans/intsights/intsights_one_vuln.csv\") parser = IntSightsParser() findings", "web server', finding.title) self.assertEquals('Critical', finding.severity) self.assertEquals( \"https://dashboard.intsights.com/#/threat-command/alerts?search=5c80dbf83b4a3900078b6be6\", finding.references) def test_intsights_parser_with_one_critical_vuln_has_one_findings_csv(" ]
[ "in byteStr]).strip() def hex_to_byte(hexStr): \"\"\" Convert hex strings to bytes.", "Also report non fatal warnings in stdout if err: report_err(outputs[\"stderr\"],", "backends folder.\"\"\" from __future__ import print_function import subprocess from subprocess", "\"\"\" if (verbose): print(file=sys.stdout, *message) if (file and file !=", "2.0 (the \"License\"); # you may not use this file", "print(\"***\", file=sys.stderr, *message) if (file and file != sys.stderr): err_file", "out_file.close() def byte_to_hex(byteStr): \"\"\" Convert byte sequences to a hex", "sys import os TIMEOUT = 10 * 60 SUCCESS =", "given byte sequences and check if they are the same.", "expected = ''.join(expected.split()).upper() if len(received) < len(expected): report_err(outputs[\"stderr\"], \"Received packet", "None if outputs[\"stderr\"] is not None: try: proc = Popen(args,", "VMware, Inc. # # Licensed under the Apache License, Version", "report to stdout if verbose \"\"\" if (verbose): print(file=sys.stdout, *message)", "%s\\n%s\" % (proc.returncode, errmsg, err)) else: # Also report non", "def run_process(verbose, proc, timeout, outputs, errmsg): def kill(process): process.kill() timer", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "\"Writing\", args) proc = None if outputs[\"stderr\"] is not None:", "have root privileges. 
Caution: Only works on Unix systems \"\"\"", "report_err(outputs[\"stderr\"], \"Process failed to start\") return proc def run_process(verbose, proc,", "\"\"\" print(\"***\", file=sys.stderr, *message) if (file and file != sys.stderr):", "file != sys.stderr): err_file = open(file, \"a+\") print(\"***\", file=err_file, *message)", "return \"_errors\" in p4filename def report_err(file, *message): \"\"\" Write message", "too short\", len(received), \"vs\", len(expected)) return FAILURE for i in", "\"\"\" received = ''.join(byte_to_hex(str(received)).split()).upper() expected = ''.join(expected.split()).upper() if len(received) <", "!= received[i]: report_err(outputs[\"stderr\"], \"Received packet \", received) report_err(outputs[\"stderr\"], \"Packet different", "file != sys.stdout): out_file = open(file, \"a+\") print(\"\", file=out_file, *message)", "given arguments as a subprocess. Time out after TIMEOUT seconds", "check if they are the same. Report errors if this", "use this file except in compliance with the License. #", "stderr if verbose \"\"\" print(\"***\", file=sys.stderr, *message) if (file and", "out) report_output(outputs[\"stdout\"], verbose, msg) if proc.returncode != SUCCESS: report_err(outputs[\"stderr\"], \"Error", "stdout if verbose \"\"\" if (verbose): print(file=sys.stdout, *message) if (file", "OUTPUT BEGIN:\\n\" \"%s########### PROCESS OUTPUT END\\n\" % out) report_output(outputs[\"stdout\"], verbose,", "Never even started report_err(outputs[\"stderr\"], \"Process failed to start\") return proc", "60 SUCCESS = 0 FAILURE = 1 SKIPPED = 2", "was not executed def is_err(p4filename): \"\"\" True if the filename", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "by multiple Python testing scripts in the backends folder.\"\"\" from", "or stdout. \"\"\" report_output(outputs[\"stdout\"], verbose, \"Writing\", args) proc = None", "License. 
# You may obtain a copy of the License", "TIMEOUT seconds and report failures or stdout. \"\"\" report_output(outputs[\"stdout\"], verbose,", "sys.stderr): err_file = open(file, \"a+\") print(\"***\", file=err_file, *message) err_file.close() def", "and file != sys.stderr): err_file = open(file, \"a+\") print(\"***\", file=err_file,", "proc = None if outputs[\"stderr\"] is not None: try: proc", "to given file, report to stderr if verbose \"\"\" print(\"***\",", "errmsg): proc = open_process(verbose, args, outputs) if proc is None:", "2], 16))) return ''.join(bytes) def compare_pkt(outputs, expected, received): \"\"\" Compare", "under the License is distributed on an \"AS IS\" BASIS,", "report_output(outputs[\"stdout\"], verbose, \"Executing\", args) return run_process(verbose, proc, timeout, outputs, errmsg)", "License for the specific language governing permissions and # limitations", "if outputs[\"stderr\"] is not None: try: proc = Popen(args, stdout=subprocess.PIPE,", "used occasionally to indicate that a test was not executed", "if (verbose): print(file=sys.stdout, *message) if (file and file != sys.stdout):", "verbose, \"Executing\", args) return run_process(verbose, proc, timeout, outputs, errmsg) def", "\"Received packet \", received) report_err(outputs[\"stderr\"], \"Packet different at position\", i,", "# Copyright 2018 VMware, Inc. # # Licensed under the", "def check_root(): \"\"\" This function returns False if the user", "\"a+\") print(\"\", file=out_file, *message) out_file.close() def byte_to_hex(byteStr): \"\"\" Convert byte", "outputs, errmsg): def kill(process): process.kill() timer = Timer(TIMEOUT, kill, [proc])", "args) return run_process(verbose, proc, timeout, outputs, errmsg) def check_root(): \"\"\"", "len(expected)): if expected[i] == \"*\": continue if expected[i] != received[i]:", "Networks, Inc. # Copyright 2018 VMware, Inc. 
# # Licensed", "len(hexStr), 2): bytes.append(chr(int(hexStr[i:i + 2], 16))) return ''.join(bytes) def compare_pkt(outputs,", "Python testing scripts in the backends folder.\"\"\" from __future__ import", "Write message to given file, report to stdout if verbose", "indicate that a test was not executed def is_err(p4filename): \"\"\"", "testing framework. Used by multiple Python testing scripts in the", "from __future__ import print_function import subprocess from subprocess import Popen", "permissions and # limitations under the License. \"\"\" Defines helper", "\", received) report_err(outputs[\"stderr\"], \"Packet different at position\", i, \": expected\",", "byte sequences and check if they are the same. Report", "returns False if the user does not have root privileges.", "in compliance with the License. # You may obtain a", "software # distributed under the License is distributed on an", "in p4filename def report_err(file, *message): \"\"\" Write message to given", "received\", received[i]) report_err(outputs[\"stderr\"], \"Expected packet \", expected) return FAILURE return", "if the user does not have root privileges. Caution: Only", "general testing framework. Used by multiple Python testing scripts in", "return FAILURE report_output(outputs[\"stdout\"], verbose, \"Executing\", args) return run_process(verbose, proc, timeout,", "user does not have root privileges. Caution: Only works on", "to stderr if verbose \"\"\" print(\"***\", file=sys.stderr, *message) if (file", "err_file = open(file, \"a+\") print(\"***\", file=err_file, *message) err_file.close() def report_output(file,", "a hex string. \"\"\" return ''.join([\"%02X \" % ord(x) for", "string. 
\"\"\" return ''.join([\"%02X \" % ord(x) for x in", "outputs) if proc is None: return FAILURE report_output(outputs[\"stdout\"], verbose, \"Executing\",", "Write message to given file, report to stderr if verbose", "i in range(0, len(expected)): if expected[i] == \"*\": continue if", "if proc is None: # Never even started report_err(outputs[\"stderr\"], \"Process", "if the filename represents a p4 program that should fail.", "errmsg, err)) else: # Also report non fatal warnings in", "Popen from threading import Timer import sys import os TIMEOUT", "compare_pkt(outputs, expected, received): \"\"\" Compare two given byte sequences and", "\", received\", received[i]) report_err(outputs[\"stderr\"], \"Expected packet \", expected) return FAILURE", "try: timer.start() out, err = proc.communicate() finally: timer.cancel() if out:", "strings to bytes. \"\"\" bytes = [] hexStr = ''.join(hexStr.split(\"", "non fatal warnings in stdout if err: report_err(outputs[\"stderr\"], err) return", "verbose, *message): \"\"\" Write message to given file, report to", "= (\"\\n########### PROCESS OUTPUT BEGIN:\\n\" \"%s########### PROCESS OUTPUT END\\n\" %", "BEGIN:\\n\" \"%s########### PROCESS OUTPUT END\\n\" % out) report_output(outputs[\"stdout\"], verbose, msg)", "helper functions for a general testing framework. Used by multiple", "OSError as e: report_err(outputs[\"stderr\"], \"Failed executing: \", e) if proc", "\"\"\" Run the given arguments as a subprocess. Time out", "file=sys.stderr, *message) if (file and file != sys.stderr): err_file =", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "should fail. \"\"\" return \"_errors\" in p4filename def report_err(file, *message):", "privileges. Caution: Only works on Unix systems \"\"\" return (os.getuid()", "ANY KIND, either express or implied. 
# See the License", "See the License for the specific language governing permissions and", "started report_err(outputs[\"stderr\"], \"Process failed to start\") return proc def run_process(verbose,", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "file, report to stderr if verbose \"\"\" print(\"***\", file=sys.stderr, *message)", "to in writing, software # distributed under the License is", "outputs, errmsg) def check_root(): \"\"\" This function returns False if", "\"*\": continue if expected[i] != received[i]: report_err(outputs[\"stderr\"], \"Received packet \",", "run_process(verbose, proc, timeout, outputs, errmsg) def check_root(): \"\"\" This function", "# See the License for the specific language governing permissions", "if verbose \"\"\" if (verbose): print(file=sys.stdout, *message) if (file and", "not the case. \"\"\" received = ''.join(byte_to_hex(str(received)).split()).upper() expected = ''.join(expected.split()).upper()", "Copyright 2018 VMware, Inc. 
# # Licensed under the Apache", "PROCESS OUTPUT BEGIN:\\n\" \"%s########### PROCESS OUTPUT END\\n\" % out) report_output(outputs[\"stdout\"],", "\"\"\" True if the filename represents a p4 program that", "report_err(outputs[\"stderr\"], err) return proc.returncode def run_timeout(verbose, args, timeout, outputs, errmsg):", "in stdout if err: report_err(outputs[\"stderr\"], err) return proc.returncode def run_timeout(verbose,", "or agreed to in writing, software # distributed under the", "open(file, \"a+\") print(\"\", file=out_file, *message) out_file.close() def byte_to_hex(byteStr): \"\"\" Convert", "required by applicable law or agreed to in writing, software", "multiple Python testing scripts in the backends folder.\"\"\" from __future__", "proc.returncode != SUCCESS: report_err(outputs[\"stderr\"], \"Error %d: %s\\n%s\" % (proc.returncode, errmsg,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "import Timer import sys import os TIMEOUT = 10 *", "byte sequences to a hex string. \"\"\" return ''.join([\"%02X \"", "''.join(expected.split()).upper() if len(received) < len(expected): report_err(outputs[\"stderr\"], \"Received packet too short\",", "with the License. # You may obtain a copy of", "(file and file != sys.stderr): err_file = open(file, \"a+\") print(\"***\",", "Time out after TIMEOUT seconds and report failures or stdout.", "run_timeout(verbose, args, timeout, outputs, errmsg): proc = open_process(verbose, args, outputs)", "out after TIMEOUT seconds and report failures or stdout. \"\"\"", "the backends folder.\"\"\" from __future__ import print_function import subprocess from", "verbose \"\"\" if (verbose): print(file=sys.stdout, *message) if (file and file", "the case. 
\"\"\" received = ''.join(byte_to_hex(str(received)).split()).upper() expected = ''.join(expected.split()).upper() if", "proc def run_process(verbose, proc, timeout, outputs, errmsg): def kill(process): process.kill()", "is not None: try: proc = Popen(args, stdout=subprocess.PIPE, shell=True, stdin=subprocess.PIPE,", "e: report_err(outputs[\"stderr\"], \"Failed executing: \", e) if proc is None:", "\"Executing\", args) return run_process(verbose, proc, timeout, outputs, errmsg) def check_root():", "compliance with the License. # You may obtain a copy", "subprocess import Popen from threading import Timer import sys import", "agreed to in writing, software # distributed under the License", "err_file.close() def report_output(file, verbose, *message): \"\"\" Write message to given", "short\", len(received), \"vs\", len(expected)) return FAILURE for i in range(0,", "distributed under the License is distributed on an \"AS IS\"", "return proc.returncode def run_timeout(verbose, args, timeout, outputs, errmsg): proc =", "\"\"\" Write message to given file, report to stderr if", "None: try: proc = Popen(args, stdout=subprocess.PIPE, shell=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)", "x in byteStr]).strip() def hex_to_byte(hexStr): \"\"\" Convert hex strings to", "expected) return FAILURE return SUCCESS def open_process(verbose, args, outputs): \"\"\"", "*message): \"\"\" Write message to given file, report to stderr", "except OSError as e: report_err(outputs[\"stderr\"], \"Failed executing: \", e) if", "express or implied. # See the License for the specific", "except in compliance with the License. # You may obtain", "and file != sys.stdout): out_file = open(file, \"a+\") print(\"\", file=out_file,", "errors if this is not the case. 
\"\"\" received =", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "report_err(outputs[\"stderr\"], \"Received packet \", received) report_err(outputs[\"stderr\"], \"Packet different at position\",", "not use this file except in compliance with the License.", "writing, software # distributed under the License is distributed on", "you may not use this file except in compliance with", "len(expected)) return FAILURE for i in range(0, len(expected)): if expected[i]", "True if the filename represents a p4 program that should", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "= ''.join(byte_to_hex(str(received)).split()).upper() expected = ''.join(expected.split()).upper() if len(received) < len(expected): report_err(outputs[\"stderr\"],", "16))) return ''.join(bytes) def compare_pkt(outputs, expected, received): \"\"\" Compare two", "received[i]) report_err(outputs[\"stderr\"], \"Expected packet \", expected) return FAILURE return SUCCESS", "if (file and file != sys.stderr): err_file = open(file, \"a+\")", "% out) report_output(outputs[\"stdout\"], verbose, msg) if proc.returncode != SUCCESS: report_err(outputs[\"stderr\"],", "not have root privileges. Caution: Only works on Unix systems", "def hex_to_byte(hexStr): \"\"\" Convert hex strings to bytes. \"\"\" bytes", "the same. Report errors if this is not the case.", "python # Copyright 2013-present Barefoot Networks, Inc. # Copyright 2018", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "2): bytes.append(chr(int(hexStr[i:i + 2], 16))) return ''.join(bytes) def compare_pkt(outputs, expected,", "in range(0, len(expected)): if expected[i] == \"*\": continue if expected[i]", "\", e) if proc is None: # Never even started", "\"Received packet too short\", len(received), \"vs\", len(expected)) return FAILURE for", "= Timer(TIMEOUT, kill, [proc]) try: timer.start() out, err = proc.communicate()", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "outputs[\"stderr\"] is not None: try: proc = Popen(args, stdout=subprocess.PIPE, shell=True,", "governing permissions and # limitations under the License. \"\"\" Defines", "\"\"\" Convert hex strings to bytes. \"\"\" bytes = []", "if proc is None: return FAILURE report_output(outputs[\"stdout\"], verbose, \"Executing\", args)", "bytes. \"\"\" bytes = [] hexStr = ''.join(hexStr.split(\" \")) for", "args, outputs) if proc is None: return FAILURE report_output(outputs[\"stdout\"], verbose,", "SUCCESS = 0 FAILURE = 1 SKIPPED = 2 #", "TIMEOUT = 10 * 60 SUCCESS = 0 FAILURE =", "file=out_file, *message) out_file.close() def byte_to_hex(byteStr): \"\"\" Convert byte sequences to", "*message) err_file.close() def report_output(file, verbose, *message): \"\"\" Write message to", "errmsg) def check_root(): \"\"\" This function returns False if the", "return run_process(verbose, proc, timeout, outputs, errmsg) def check_root(): \"\"\" This", "i in range(0, len(hexStr), 2): bytes.append(chr(int(hexStr[i:i + 2], 16))) return", "bytes.append(chr(int(hexStr[i:i + 2], 16))) return ''.join(bytes) def compare_pkt(outputs, expected, received):", "import subprocess from subprocess import Popen from threading import Timer", "Barefoot Networks, Inc. # Copyright 2018 VMware, Inc. 
# #", "timer.start() out, err = proc.communicate() finally: timer.cancel() if out: msg", "SUCCESS def open_process(verbose, args, outputs): \"\"\" Run the given arguments", "report_err(file, *message): \"\"\" Write message to given file, report to", "FAILURE report_output(outputs[\"stdout\"], verbose, \"Executing\", args) return run_process(verbose, proc, timeout, outputs,", "OR CONDITIONS OF ANY KIND, either express or implied. #", "the License is distributed on an \"AS IS\" BASIS, #", "at position\", i, \": expected\", expected[i], \", received\", received[i]) report_err(outputs[\"stderr\"],", "file=err_file, *message) err_file.close() def report_output(file, verbose, *message): \"\"\" Write message", "sequences to a hex string. \"\"\" return ''.join([\"%02X \" %", "the License. \"\"\" Defines helper functions for a general testing", "def is_err(p4filename): \"\"\" True if the filename represents a p4", "''.join([\"%02X \" % ord(x) for x in byteStr]).strip() def hex_to_byte(hexStr):", "after TIMEOUT seconds and report failures or stdout. \"\"\" report_output(outputs[\"stdout\"],", "print_function import subprocess from subprocess import Popen from threading import", "def byte_to_hex(byteStr): \"\"\" Convert byte sequences to a hex string.", "language governing permissions and # limitations under the License. 
\"\"\"", "to start\") return proc def run_process(verbose, proc, timeout, outputs, errmsg):", "two given byte sequences and check if they are the", "is None: return FAILURE report_output(outputs[\"stdout\"], verbose, \"Executing\", args) return run_process(verbose,", "threading import Timer import sys import os TIMEOUT = 10", "(verbose): print(file=sys.stdout, *message) if (file and file != sys.stdout): out_file", "\" % ord(x) for x in byteStr]).strip() def hex_to_byte(hexStr): \"\"\"", "if len(received) < len(expected): report_err(outputs[\"stderr\"], \"Received packet too short\", len(received),", "return proc def run_process(verbose, proc, timeout, outputs, errmsg): def kill(process):", "PROCESS OUTPUT END\\n\" % out) report_output(outputs[\"stdout\"], verbose, msg) if proc.returncode", "report failures or stdout. \"\"\" report_output(outputs[\"stdout\"], verbose, \"Writing\", args) proc", "report_err(outputs[\"stderr\"], \"Error %d: %s\\n%s\" % (proc.returncode, errmsg, err)) else: #", "*message) out_file.close() def byte_to_hex(byteStr): \"\"\" Convert byte sequences to a", "2 # used occasionally to indicate that a test was", "import print_function import subprocess from subprocess import Popen from threading", "given file, report to stdout if verbose \"\"\" if (verbose):", "print(\"\", file=out_file, *message) out_file.close() def byte_to_hex(byteStr): \"\"\" Convert byte sequences", "does not have root privileges. Caution: Only works on Unix", "law or agreed to in writing, software # distributed under", "\"\"\" report_output(outputs[\"stdout\"], verbose, \"Writing\", args) proc = None if outputs[\"stderr\"]", "(proc.returncode, errmsg, err)) else: # Also report non fatal warnings", "if out: msg = (\"\\n########### PROCESS OUTPUT BEGIN:\\n\" \"%s########### PROCESS", "sys.stdout): out_file = open(file, \"a+\") print(\"\", file=out_file, *message) out_file.close() def", "the user does not have root privileges. 
Caution: Only works", "executing: \", e) if proc is None: # Never even", "Popen(args, stdout=subprocess.PIPE, shell=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) except OSError as e:", "if (file and file != sys.stdout): out_file = open(file, \"a+\")", "position\", i, \": expected\", expected[i], \", received\", received[i]) report_err(outputs[\"stderr\"], \"Expected", "folder.\"\"\" from __future__ import print_function import subprocess from subprocess import", "# Copyright 2013-present Barefoot Networks, Inc. # Copyright 2018 VMware,", "byte_to_hex(byteStr): \"\"\" Convert byte sequences to a hex string. \"\"\"", "start\") return proc def run_process(verbose, proc, timeout, outputs, errmsg): def", "Compare two given byte sequences and check if they are", "p4filename def report_err(file, *message): \"\"\" Write message to given file,", "may obtain a copy of the License at # #", "hex strings to bytes. \"\"\" bytes = [] hexStr =", "byteStr]).strip() def hex_to_byte(hexStr): \"\"\" Convert hex strings to bytes. \"\"\"", "\"Error %d: %s\\n%s\" % (proc.returncode, errmsg, err)) else: # Also", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Inc. # # Licensed under the Apache License, Version 2.0", "timeout, outputs, errmsg): def kill(process): process.kill() timer = Timer(TIMEOUT, kill,", "proc.communicate() finally: timer.cancel() if out: msg = (\"\\n########### PROCESS OUTPUT", "may not use this file except in compliance with the", "return ''.join([\"%02X \" % ord(x) for x in byteStr]).strip() def", "== \"*\": continue if expected[i] != received[i]: report_err(outputs[\"stderr\"], \"Received packet", "= None if outputs[\"stderr\"] is not None: try: proc =", "print(file=sys.stdout, *message) if (file and file != sys.stdout): out_file =", "to bytes. 
\"\"\" bytes = [] hexStr = ''.join(hexStr.split(\" \"))", "range(0, len(hexStr), 2): bytes.append(chr(int(hexStr[i:i + 2], 16))) return ''.join(bytes) def", "timeout, outputs, errmsg) def check_root(): \"\"\" This function returns False", "def compare_pkt(outputs, expected, received): \"\"\" Compare two given byte sequences", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "# limitations under the License. \"\"\" Defines helper functions for", "= 0 FAILURE = 1 SKIPPED = 2 # used", "this file except in compliance with the License. # You", "received): \"\"\" Compare two given byte sequences and check if", "stdout. \"\"\" report_output(outputs[\"stdout\"], verbose, \"Writing\", args) proc = None if", "proc is None: return FAILURE report_output(outputs[\"stdout\"], verbose, \"Executing\", args) return", "this is not the case. \"\"\" received = ''.join(byte_to_hex(str(received)).split()).upper() expected", "\"\"\" Convert byte sequences to a hex string. \"\"\" return", "proc is None: # Never even started report_err(outputs[\"stderr\"], \"Process failed", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# # Licensed under the Apache License, Version 2.0 (the", "Only works on Unix systems \"\"\" return (os.getuid() == 0)", "to a hex string. \"\"\" return ''.join([\"%02X \" % ord(x)", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "\"Expected packet \", expected) return FAILURE return SUCCESS def open_process(verbose,", "a test was not executed def is_err(p4filename): \"\"\" True if", "msg) if proc.returncode != SUCCESS: report_err(outputs[\"stderr\"], \"Error %d: %s\\n%s\" %", "Report errors if this is not the case. 
\"\"\" received", "FAILURE for i in range(0, len(expected)): if expected[i] == \"*\":", "open(file, \"a+\") print(\"***\", file=err_file, *message) err_file.close() def report_output(file, verbose, *message):", "e) if proc is None: # Never even started report_err(outputs[\"stderr\"],", "that should fail. \"\"\" return \"_errors\" in p4filename def report_err(file,", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "packet \", received) report_err(outputs[\"stderr\"], \"Packet different at position\", i, \":", "verbose \"\"\" print(\"***\", file=sys.stderr, *message) if (file and file !=", "from subprocess import Popen from threading import Timer import sys", "import Popen from threading import Timer import sys import os", "= ''.join(hexStr.split(\" \")) for i in range(0, len(hexStr), 2): bytes.append(chr(int(hexStr[i:i", "a general testing framework. Used by multiple Python testing scripts", "report_output(outputs[\"stdout\"], verbose, msg) if proc.returncode != SUCCESS: report_err(outputs[\"stderr\"], \"Error %d:", "if proc.returncode != SUCCESS: report_err(outputs[\"stderr\"], \"Error %d: %s\\n%s\" % (proc.returncode,", "END\\n\" % out) report_output(outputs[\"stdout\"], verbose, msg) if proc.returncode != SUCCESS:", "hexStr = ''.join(hexStr.split(\" \")) for i in range(0, len(hexStr), 2):", "\"%s########### PROCESS OUTPUT END\\n\" % out) report_output(outputs[\"stdout\"], verbose, msg) if", "represents a p4 program that should fail. \"\"\" return \"_errors\"", "# Also report non fatal warnings in stdout if err:", "from threading import Timer import sys import os TIMEOUT =", "report_output(file, verbose, *message): \"\"\" Write message to given file, report", "= ''.join(expected.split()).upper() if len(received) < len(expected): report_err(outputs[\"stderr\"], \"Received packet too", "# Never even started report_err(outputs[\"stderr\"], \"Process failed to start\") return", "2018 VMware, Inc. 
# # Licensed under the Apache License,", "failed to start\") return proc def run_process(verbose, proc, timeout, outputs,", "outputs): \"\"\" Run the given arguments as a subprocess. Time", "report to stderr if verbose \"\"\" print(\"***\", file=sys.stderr, *message) if", "not executed def is_err(p4filename): \"\"\" True if the filename represents", "import sys import os TIMEOUT = 10 * 60 SUCCESS", "return FAILURE return SUCCESS def open_process(verbose, args, outputs): \"\"\" Run", "proc.returncode def run_timeout(verbose, args, timeout, outputs, errmsg): proc = open_process(verbose,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "proc = open_process(verbose, args, outputs) if proc is None: return", "in the backends folder.\"\"\" from __future__ import print_function import subprocess", "\"vs\", len(expected)) return FAILURE for i in range(0, len(expected)): if", "if err: report_err(outputs[\"stderr\"], err) return proc.returncode def run_timeout(verbose, args, timeout,", "#!/usr/bin/env python # Copyright 2013-present Barefoot Networks, Inc. # Copyright", "shell=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) except OSError as e: report_err(outputs[\"stderr\"], \"Failed", "or implied. # See the License for the specific language", "\"\"\" This function returns False if the user does not", "message to given file, report to stderr if verbose \"\"\"", "< len(expected): report_err(outputs[\"stderr\"], \"Received packet too short\", len(received), \"vs\", len(expected))", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "ord(x) for x in byteStr]).strip() def hex_to_byte(hexStr): \"\"\" Convert hex", "and check if they are the same. Report errors if", "out, err = proc.communicate() finally: timer.cancel() if out: msg =", "2013-present Barefoot Networks, Inc. # Copyright 2018 VMware, Inc. 
#", "\"\"\" Defines helper functions for a general testing framework. Used", "stdout if err: report_err(outputs[\"stderr\"], err) return proc.returncode def run_timeout(verbose, args,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "is not the case. \"\"\" received = ''.join(byte_to_hex(str(received)).split()).upper() expected =", "for x in byteStr]).strip() def hex_to_byte(hexStr): \"\"\" Convert hex strings", "Convert byte sequences to a hex string. \"\"\" return ''.join([\"%02X", "verbose, msg) if proc.returncode != SUCCESS: report_err(outputs[\"stderr\"], \"Error %d: %s\\n%s\"", "program that should fail. \"\"\" return \"_errors\" in p4filename def", "= [] hexStr = ''.join(hexStr.split(\" \")) for i in range(0,", "License. \"\"\" Defines helper functions for a general testing framework.", "report_err(outputs[\"stderr\"], \"Packet different at position\", i, \": expected\", expected[i], \",", "timer = Timer(TIMEOUT, kill, [proc]) try: timer.start() out, err =", "[proc]) try: timer.start() out, err = proc.communicate() finally: timer.cancel() if", "packet too short\", len(received), \"vs\", len(expected)) return FAILURE for i", "def report_err(file, *message): \"\"\" Write message to given file, report", "Timer(TIMEOUT, kill, [proc]) try: timer.start() out, err = proc.communicate() finally:", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "+ 2], 16))) return ''.join(bytes) def compare_pkt(outputs, expected, received): \"\"\"", "else: # Also report non fatal warnings in stdout if", "message to given file, report to stdout if verbose \"\"\"", "\")) for i in range(0, len(hexStr), 2): bytes.append(chr(int(hexStr[i:i + 2],", "len(received) < len(expected): report_err(outputs[\"stderr\"], \"Received packet too short\", len(received), \"vs\",", "proc = Popen(args, stdout=subprocess.PIPE, shell=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) 
except OSError", "range(0, len(expected)): if expected[i] == \"*\": continue if expected[i] !=", "(file and file != sys.stdout): out_file = open(file, \"a+\") print(\"\",", "err = proc.communicate() finally: timer.cancel() if out: msg = (\"\\n###########", "os TIMEOUT = 10 * 60 SUCCESS = 0 FAILURE", "# # Unless required by applicable law or agreed to", "open_process(verbose, args, outputs): \"\"\" Run the given arguments as a", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "\"a+\") print(\"***\", file=err_file, *message) err_file.close() def report_output(file, verbose, *message): \"\"\"", "out_file = open(file, \"a+\") print(\"\", file=out_file, *message) out_file.close() def byte_to_hex(byteStr):", "!= sys.stdout): out_file = open(file, \"a+\") print(\"\", file=out_file, *message) out_file.close()", "Version 2.0 (the \"License\"); # you may not use this", "kill(process): process.kill() timer = Timer(TIMEOUT, kill, [proc]) try: timer.start() out,", "def report_output(file, verbose, *message): \"\"\" Write message to given file,", "!= SUCCESS: report_err(outputs[\"stderr\"], \"Error %d: %s\\n%s\" % (proc.returncode, errmsg, err))", "\", expected) return FAILURE return SUCCESS def open_process(verbose, args, outputs):", "given file, report to stderr if verbose \"\"\" print(\"***\", file=sys.stderr,", "verbose, \"Writing\", args) proc = None if outputs[\"stderr\"] is not", "executed def is_err(p4filename): \"\"\" True if the filename represents a", "warnings in stdout if err: report_err(outputs[\"stderr\"], err) return proc.returncode def", "implied. # See the License for the specific language governing", "stdin=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) except OSError as e: report_err(outputs[\"stderr\"], \"Failed executing:", "under the Apache License, Version 2.0 (the \"License\"); # you", "Defines helper functions for a general testing framework. 
Used by", "\"\"\" return ''.join([\"%02X \" % ord(x) for x in byteStr]).strip()", "= 1 SKIPPED = 2 # used occasionally to indicate", "for a general testing framework. Used by multiple Python testing", "failures or stdout. \"\"\" report_output(outputs[\"stdout\"], verbose, \"Writing\", args) proc =", "p4 program that should fail. \"\"\" return \"_errors\" in p4filename", "by applicable law or agreed to in writing, software #", "* 60 SUCCESS = 0 FAILURE = 1 SKIPPED =", "OUTPUT END\\n\" % out) report_output(outputs[\"stdout\"], verbose, msg) if proc.returncode !=", "limitations under the License. \"\"\" Defines helper functions for a", "out: msg = (\"\\n########### PROCESS OUTPUT BEGIN:\\n\" \"%s########### PROCESS OUTPUT", "same. Report errors if this is not the case. \"\"\"", "that a test was not executed def is_err(p4filename): \"\"\" True", "timeout, outputs, errmsg): proc = open_process(verbose, args, outputs) if proc", "hex_to_byte(hexStr): \"\"\" Convert hex strings to bytes. \"\"\" bytes =", "sequences and check if they are the same. Report errors", "for i in range(0, len(expected)): if expected[i] == \"*\": continue", "subprocess. Time out after TIMEOUT seconds and report failures or", "SUCCESS: report_err(outputs[\"stderr\"], \"Error %d: %s\\n%s\" % (proc.returncode, errmsg, err)) else:", "framework. Used by multiple Python testing scripts in the backends", "packet \", expected) return FAILURE return SUCCESS def open_process(verbose, args,", "\"Failed executing: \", e) if proc is None: # Never", "report_err(outputs[\"stderr\"], \"Failed executing: \", e) if proc is None: #", "kill, [proc]) try: timer.start() out, err = proc.communicate() finally: timer.cancel()", "the given arguments as a subprocess. 
Time out after TIMEOUT", "expected[i], \", received\", received[i]) report_err(outputs[\"stderr\"], \"Expected packet \", expected) return", "\"Process failed to start\") return proc def run_process(verbose, proc, timeout,", "% ord(x) for x in byteStr]).strip() def hex_to_byte(hexStr): \"\"\" Convert", "%d: %s\\n%s\" % (proc.returncode, errmsg, err)) else: # Also report", "proc, timeout, outputs, errmsg) def check_root(): \"\"\" This function returns", "functions for a general testing framework. Used by multiple Python", "msg = (\"\\n########### PROCESS OUTPUT BEGIN:\\n\" \"%s########### PROCESS OUTPUT END\\n\"", "open_process(verbose, args, outputs) if proc is None: return FAILURE report_output(outputs[\"stdout\"],", "expected, received): \"\"\" Compare two given byte sequences and check", "are the same. Report errors if this is not the", "report_output(outputs[\"stdout\"], verbose, \"Writing\", args) proc = None if outputs[\"stderr\"] is", "filename represents a p4 program that should fail. \"\"\" return", "# used occasionally to indicate that a test was not", "they are the same. Report errors if this is not", "print(\"***\", file=err_file, *message) err_file.close() def report_output(file, verbose, *message): \"\"\" Write", "check_root(): \"\"\" This function returns False if the user does", "len(expected): report_err(outputs[\"stderr\"], \"Received packet too short\", len(received), \"vs\", len(expected)) return", "*message) if (file and file != sys.stdout): out_file = open(file,", "run_process(verbose, proc, timeout, outputs, errmsg): def kill(process): process.kill() timer =", "\"_errors\" in p4filename def report_err(file, *message): \"\"\" Write message to", "to given file, report to stdout if verbose \"\"\" if", "case. \"\"\" received = ''.join(byte_to_hex(str(received)).split()).upper() expected = ''.join(expected.split()).upper() if len(received)", "as a subprocess. 
Time out after TIMEOUT seconds and report", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "stderr=subprocess.PIPE, universal_newlines=True) except OSError as e: report_err(outputs[\"stderr\"], \"Failed executing: \",", "file, report to stdout if verbose \"\"\" if (verbose): print(file=sys.stdout,", "Timer import sys import os TIMEOUT = 10 * 60", "Unless required by applicable law or agreed to in writing,", "return FAILURE for i in range(0, len(expected)): if expected[i] ==", "import os TIMEOUT = 10 * 60 SUCCESS = 0", "as e: report_err(outputs[\"stderr\"], \"Failed executing: \", e) if proc is", "__future__ import print_function import subprocess from subprocess import Popen from", "*message): \"\"\" Write message to given file, report to stdout", "(\"\\n########### PROCESS OUTPUT BEGIN:\\n\" \"%s########### PROCESS OUTPUT END\\n\" % out)", "Inc. # Copyright 2018 VMware, Inc. # # Licensed under", "finally: timer.cancel() if out: msg = (\"\\n########### PROCESS OUTPUT BEGIN:\\n\"", "subprocess from subprocess import Popen from threading import Timer import", "if this is not the case. \"\"\" received = ''.join(byte_to_hex(str(received)).split()).upper()", "different at position\", i, \": expected\", expected[i], \", received\", received[i])", "the specific language governing permissions and # limitations under the", "occasionally to indicate that a test was not executed def", "hex string. \"\"\" return ''.join([\"%02X \" % ord(x) for x", "applicable law or agreed to in writing, software # distributed", "fail. \"\"\" return \"_errors\" in p4filename def report_err(file, *message): \"\"\"", "\": expected\", expected[i], \", received\", received[i]) report_err(outputs[\"stderr\"], \"Expected packet \",", "if expected[i] == \"*\": continue if expected[i] != received[i]: report_err(outputs[\"stderr\"],", "None: return FAILURE report_output(outputs[\"stdout\"], verbose, \"Executing\", args) return run_process(verbose, proc,", "a p4 program that should fail. 
\"\"\" return \"_errors\" in", "SKIPPED = 2 # used occasionally to indicate that a", "expected[i] != received[i]: report_err(outputs[\"stderr\"], \"Received packet \", received) report_err(outputs[\"stderr\"], \"Packet", "''.join(hexStr.split(\" \")) for i in range(0, len(hexStr), 2): bytes.append(chr(int(hexStr[i:i +", "timer.cancel() if out: msg = (\"\\n########### PROCESS OUTPUT BEGIN:\\n\" \"%s###########", "in writing, software # distributed under the License is distributed", "len(received), \"vs\", len(expected)) return FAILURE for i in range(0, len(expected)):", "is_err(p4filename): \"\"\" True if the filename represents a p4 program", "the filename represents a p4 program that should fail. \"\"\"", "test was not executed def is_err(p4filename): \"\"\" True if the", "err: report_err(outputs[\"stderr\"], err) return proc.returncode def run_timeout(verbose, args, timeout, outputs,", "= open_process(verbose, args, outputs) if proc is None: return FAILURE", "expected\", expected[i], \", received\", received[i]) report_err(outputs[\"stderr\"], \"Expected packet \", expected)", "root privileges. 
Caution: Only works on Unix systems \"\"\" return", "for i in range(0, len(hexStr), 2): bytes.append(chr(int(hexStr[i:i + 2], 16)))", "return ''.join(bytes) def compare_pkt(outputs, expected, received): \"\"\" Compare two given", "\"\"\" bytes = [] hexStr = ''.join(hexStr.split(\" \")) for i", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "FAILURE = 1 SKIPPED = 2 # used occasionally to", "# You may obtain a copy of the License at", "= 10 * 60 SUCCESS = 0 FAILURE = 1", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "args, outputs): \"\"\" Run the given arguments as a subprocess.", "received = ''.join(byte_to_hex(str(received)).split()).upper() expected = ''.join(expected.split()).upper() if len(received) < len(expected):", "i, \": expected\", expected[i], \", received\", received[i]) report_err(outputs[\"stderr\"], \"Expected packet", "received[i]: report_err(outputs[\"stderr\"], \"Received packet \", received) report_err(outputs[\"stderr\"], \"Packet different at", "report_err(outputs[\"stderr\"], \"Received packet too short\", len(received), \"vs\", len(expected)) return FAILURE", "args) proc = None if outputs[\"stderr\"] is not None: try:", "even started report_err(outputs[\"stderr\"], \"Process failed to start\") return proc def", "under the License. \"\"\" Defines helper functions for a general", "the License for the specific language governing permissions and #", "Apache License, Version 2.0 (the \"License\"); # you may not", "to stdout if verbose \"\"\" if (verbose): print(file=sys.stdout, *message) if", "''.join(byte_to_hex(str(received)).split()).upper() expected = ''.join(expected.split()).upper() if len(received) < len(expected): report_err(outputs[\"stderr\"], \"Received", "scripts in the backends folder.\"\"\" from __future__ import print_function import", "either express or implied. 
# See the License for the", "report_err(outputs[\"stderr\"], \"Expected packet \", expected) return FAILURE return SUCCESS def", "= Popen(args, stdout=subprocess.PIPE, shell=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) except OSError as", "seconds and report failures or stdout. \"\"\" report_output(outputs[\"stdout\"], verbose, \"Writing\",", "''.join(bytes) def compare_pkt(outputs, expected, received): \"\"\" Compare two given byte", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "FAILURE return SUCCESS def open_process(verbose, args, outputs): \"\"\" Run the", "Run the given arguments as a subprocess. Time out after", "outputs, errmsg): proc = open_process(verbose, args, outputs) if proc is", "expected[i] == \"*\": continue if expected[i] != received[i]: report_err(outputs[\"stderr\"], \"Received", "if expected[i] != received[i]: report_err(outputs[\"stderr\"], \"Received packet \", received) report_err(outputs[\"stderr\"],", "None: # Never even started report_err(outputs[\"stderr\"], \"Process failed to start\")", "and report failures or stdout. \"\"\" report_output(outputs[\"stdout\"], verbose, \"Writing\", args)", "def run_timeout(verbose, args, timeout, outputs, errmsg): proc = open_process(verbose, args,", "*message) if (file and file != sys.stderr): err_file = open(file,", "= 2 # used occasionally to indicate that a test", "This function returns False if the user does not have", "received) report_err(outputs[\"stderr\"], \"Packet different at position\", i, \": expected\", expected[i],", "and # limitations under the License. 
\"\"\" Defines helper functions", "\"Packet different at position\", i, \": expected\", expected[i], \", received\",", "if verbose \"\"\" print(\"***\", file=sys.stderr, *message) if (file and file", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "0 FAILURE = 1 SKIPPED = 2 # used occasionally", "1 SKIPPED = 2 # used occasionally to indicate that", "return SUCCESS def open_process(verbose, args, outputs): \"\"\" Run the given", "False if the user does not have root privileges. Caution:", "[] hexStr = ''.join(hexStr.split(\" \")) for i in range(0, len(hexStr),", "continue if expected[i] != received[i]: report_err(outputs[\"stderr\"], \"Received packet \", received)", "!= sys.stderr): err_file = open(file, \"a+\") print(\"***\", file=err_file, *message) err_file.close()", "fatal warnings in stdout if err: report_err(outputs[\"stderr\"], err) return proc.returncode", "\"License\"); # you may not use this file except in", "Caution: Only works on Unix systems \"\"\" return (os.getuid() ==", "try: proc = Popen(args, stdout=subprocess.PIPE, shell=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) except", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "arguments as a subprocess. Time out after TIMEOUT seconds and", "proc, timeout, outputs, errmsg): def kill(process): process.kill() timer = Timer(TIMEOUT,", "bytes = [] hexStr = ''.join(hexStr.split(\" \")) for i in", "def open_process(verbose, args, outputs): \"\"\" Run the given arguments as", "# distributed under the License is distributed on an \"AS", "% (proc.returncode, errmsg, err)) else: # Also report non fatal", "err) return proc.returncode def run_timeout(verbose, args, timeout, outputs, errmsg): proc", "Convert hex strings to bytes. 
\"\"\" bytes = [] hexStr", "# Unless required by applicable law or agreed to in", "in range(0, len(hexStr), 2): bytes.append(chr(int(hexStr[i:i + 2], 16))) return ''.join(bytes)", "stdout=subprocess.PIPE, shell=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) except OSError as e: report_err(outputs[\"stderr\"],", "def kill(process): process.kill() timer = Timer(TIMEOUT, kill, [proc]) try: timer.start()", "testing scripts in the backends folder.\"\"\" from __future__ import print_function", "\"\"\" Write message to given file, report to stdout if", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "= open(file, \"a+\") print(\"\", file=out_file, *message) out_file.close() def byte_to_hex(byteStr): \"\"\"", "process.kill() timer = Timer(TIMEOUT, kill, [proc]) try: timer.start() out, err", "err)) else: # Also report non fatal warnings in stdout", "= open(file, \"a+\") print(\"***\", file=err_file, *message) err_file.close() def report_output(file, verbose,", "errmsg): def kill(process): process.kill() timer = Timer(TIMEOUT, kill, [proc]) try:", "\"\"\" Compare two given byte sequences and check if they", "<reponame>sktollman/p4c #!/usr/bin/env python # Copyright 2013-present Barefoot Networks, Inc. #", "You may obtain a copy of the License at #", "function returns False if the user does not have root", "Used by multiple Python testing scripts in the backends folder.\"\"\"", "to indicate that a test was not executed def is_err(p4filename):", "Copyright 2013-present Barefoot Networks, Inc. 
# Copyright 2018 VMware, Inc.", "10 * 60 SUCCESS = 0 FAILURE = 1 SKIPPED", "\"\"\" return \"_errors\" in p4filename def report_err(file, *message): \"\"\" Write", "= proc.communicate() finally: timer.cancel() if out: msg = (\"\\n########### PROCESS", "universal_newlines=True) except OSError as e: report_err(outputs[\"stderr\"], \"Failed executing: \", e)", "not None: try: proc = Popen(args, stdout=subprocess.PIPE, shell=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE,", "if they are the same. Report errors if this is", "report non fatal warnings in stdout if err: report_err(outputs[\"stderr\"], err)", "args, timeout, outputs, errmsg): proc = open_process(verbose, args, outputs) if", "is None: # Never even started report_err(outputs[\"stderr\"], \"Process failed to", "the Apache License, Version 2.0 (the \"License\"); # you may", "a subprocess. Time out after TIMEOUT seconds and report failures" ]
[ "if not temp: raise(Exception) if len(self.indexes) > 1: lastKey =", "self.hasRight(index) and self.bigger(self.leftIndex(index), self.rightIndex(index)): smaller = self.rightIndex(index) if self.bigger(smaller, index):", "i, j): x = self.indexes[i] # key of 1 item", "if len(cmd) == 2 and cmd[0] == '' and cmd[1]", "heap.delete(int(cmd[1])) elif cmd[0] == 'search': heap.search(int(cmd[1])) elif cmd[0] == 'min':", "index = self.parentIndex(index) def heapifyDown(self, i=0): index = i while", "= int(len(self.indexes)/2) maxKey = self.indexes[i] index = i while i", "== 'search': heap.search(int(cmd[1])) elif cmd[0] == 'min': heap.min() elif cmd[0]", "True # Check family UwU def hasParent(self, i): if (i", "if self.hasRight(index) and self.bigger(self.leftIndex(index), self.rightIndex(index)): smaller = self.rightIndex(index) if self.bigger(smaller,", "'' and cmd[1] == '': continue if cmd[0] == 'add':", "y = self.indexes[j] # key of 2 item # swap", "self.indexes[j] = x temp = self.items[x][1] # index of 1", "self.items[x][1] # index of 1 item # swap indexes in", "temp = self.items.get(key, None) if temp: print('1', temp[1], temp[0]) else:", "+= '[' + str(key) + ' ' + self.items[key][0] if", "Usefull functions def swap(self, i, j): x = self.indexes[i] #", "i*2 + 2 < len(self.indexes): return True return False def", "heap.search(int(cmd[1])) elif cmd[0] == 'min': heap.min() elif cmd[0] == 'max':", "# Check family UwU def hasParent(self, i): if (i -", "index) self.indexes = [] # index - key // to", "self.items.update({y: (self.items[y][0], temp)}) def bigger(self, i, j): if self.indexes[i] <=", "int(len(self.indexes))) self.indexes.append(key) self.heapifyUp() def set(self, key, data): temp = self.items.get(key,", "= self.indexes[0] print(key, '0', self.items[key][0]) def max(self): if len(self.indexes) ==", "out += '_ ' * (lineLen - index) print(out[0:-1]) else:", "__init__(self): self.items = dict() # key - (value, index) self.indexes", "def heapifyDown(self, i=0): 
index = i while self.hasLeft(index): smaller =", "index to 0 self.items.update({self.indexes[0] : (self.items[self.indexes[0]][0], 0)}) self.heapifyDown() else: self.indexes.pop()", ": (self.items[self.indexes[0]][0], 0)}) self.heapifyDown() else: self.indexes.pop() print(rootKey, rootData) def print(self):", "last element self.heapifyDown(i=temp[1]) self.heapifyUp(i=temp[1]) def search(self, key): temp = self.items.get(key,", "if i: index = i else: index = len(self.indexes) -", "self.swap(self.parentIndex(index), index) index = self.parentIndex(index) def heapifyDown(self, i=0): index =", "= [] # index - key // to know indexes", "i): if i*2 + 1 < len(self.indexes): return True return", "heapifys def heapifyUp(self, i=None): if i: index = i else:", "cmd[0] == 'search': heap.search(int(cmd[1])) elif cmd[0] == 'min': heap.min() elif", "cmd[0] == 'min': heap.min() elif cmd[0] == 'max': heap.max() elif", "= '' i = 0 if len(self.indexes) == 0: out", "= self.indexes[-1] last = self.items.get(lastKey, None) # set last item", "< len(self.indexes): return True return False def leftIndex(self, i): return", "+ 2) # heapifys def heapifyUp(self, i=None): if i: index", "to deleted index self.indexes[temp[1]] = lastKey self.indexes.pop() del self.items[key] if", "cmd[2]) elif cmd[0] == 'delete': heap.delete(int(cmd[1])) elif cmd[0] == 'search':", "- (value, index) self.indexes = [] # index - key", "self.items.get(lastKey, None) # set last item index of deleted self.items.update({lastKey:", "self.heapifyUp(i=temp[1]) def search(self, key): temp = self.items.get(key, None) if temp:", "print('1', temp[1], temp[0]) else: print('0') def min(self): if len(self.indexes) ==", "' + str(self.indexes[self.parentIndex(i)]) out += ']' if index == lineLen:", "+= '_\\n' print('_') return while i < len(self.indexes): lineLen =", "' * (lineLen - index) print(out[0:-1]) else: print(out, end='') cycle", "index self.indexes[temp[1]] = lastKey self.indexes.pop() del self.items[key] if temp[1] <", 
"delete(self, key): temp = self.items.get(key, None) if not temp: raise(Exception)", "0: raise(Exception) key = self.indexes[0] print(key, '0', self.items[key][0]) def max(self):", "index = 0 out = '' i = 0 if", "cycle = True heap = Heap() while cycle: try: line", "None): raise(Exception) self.items[key] = (data, int(len(self.indexes))) self.indexes.append(key) self.heapifyUp() def set(self,", "self.bigger(self.parentIndex(index), index): self.swap(self.parentIndex(index), index) index = self.parentIndex(index) def heapifyDown(self, i=0):", "index != 0 and index < lineLen: out += '_", "* (lineLen - index) print(out[0:-1]) else: print(out, end='') cycle =", "+ 2 < len(self.indexes): return True return False def rightIndex(self,", "bigger(self, i, j): if self.indexes[i] <= self.indexes[j]: return False else:", "index = len(self.indexes) - 1 while self.hasParent(index) and self.bigger(self.parentIndex(index), index):", "self.indexes[i] # key of 1 item y = self.indexes[j] #", "1) def hasRight(self, i): if i*2 + 2 < len(self.indexes):", "cmd[0] == 'add': heap.add(int(cmd[1]), cmd[2]) elif cmd[0] == 'set': heap.set(int(cmd[1]),", "heap.max() elif cmd[0] == 'extract': heap.extract() elif cmd[0] == 'print':", "self.indexes.pop() print(rootKey, rootData) def print(self): height = 0 index =", "lastKey self.indexes.pop() del self.items[key] if temp[1] < len(self.indexes): # dont", "item # swap keys in index array self.indexes[i] = y", "True return False def leftIndex(self, i): return int(i*2 + 1)", "== 0: raise(Exception) i = int(len(self.indexes)/2) maxKey = self.indexes[i] index", "len(self.indexes): if maxKey < self.indexes[i]: maxKey = self.indexes[i] index =", "max(self): if len(self.indexes) == 0: raise(Exception) i = int(len(self.indexes)/2) maxKey", "out += '_\\n' print('_') return while i < len(self.indexes): lineLen", "1)/2 >= 0: return True return False def parentIndex(self, i):", "cmd[0] == '': continue if len(cmd) == 2 and cmd[0]", "not temp: raise(Exception) if 
len(self.indexes) > 1: lastKey = self.indexes[-1]", "while self.hasParent(index) and self.bigger(self.parentIndex(index), index): self.swap(self.parentIndex(index), index) index = self.parentIndex(index)", "< self.indexes[i]: maxKey = self.indexes[i] index = i i +=", "None) if not temp: raise(Exception) if len(self.indexes) > 1: lastKey", "data): if self.items.get(key, None): raise(Exception) self.items[key] = (data, int(len(self.indexes))) self.indexes.append(key)", "elif cmd[0] == 'extract': heap.extract() elif cmd[0] == 'print': heap.print()", "', 2) try: if len(cmd) == 1 and cmd[0] ==", "= line.split(' ', 2) try: if len(cmd) == 1 and", "heap.set(int(cmd[1]), cmd[2]) elif cmd[0] == 'delete': heap.delete(int(cmd[1])) elif cmd[0] ==", "temp: raise(Exception) self.items[key] = (data, temp[1]) def delete(self, key): temp", "print(out, end='') cycle = True heap = Heap() while cycle:", "i=None): if i: index = i else: index = len(self.indexes)", "1 while self.hasParent(index) and self.bigger(self.parentIndex(index), index): self.swap(self.parentIndex(index), index) index =", "+ str(self.indexes[self.parentIndex(i)]) out += ']' if index == lineLen: out", "index += 1 key = self.indexes[i] out += '[' +", "1 and cmd[0] == '': continue if len(cmd) == 2", "if not temp: raise(Exception) self.items[key] = (data, temp[1]) def delete(self,", "int(i*2 + 1) def hasRight(self, i): if i*2 + 2", "temp: raise(Exception) if len(self.indexes) > 1: lastKey = self.indexes[-1] last", "last item index of deleted self.items.update({lastKey: (last[0], temp[1])}) # set", "0 index = 0 out = '' i = 0", "else: self.indexes.pop() print(rootKey, rootData) def print(self): height = 0 index", "hasParent(self, i): if (i - 1)/2 >= 0: return True", "= (data, temp[1]) def delete(self, key): temp = self.items.get(key, None)", "len(self.indexes): # dont heapify if deleted last element self.heapifyDown(i=temp[1]) self.heapifyUp(i=temp[1])", "index array self.indexes[i] = y self.indexes[j] = x temp =", 
"return while i < len(self.indexes): lineLen = 1 << height", "height += 1 else: out += ' ' i +=", "line = input() cmd = line.split(' ', 2) try: if", "= self.items.get(key, None) if not temp: raise(Exception) if len(self.indexes) >", "else: print('0') def min(self): if len(self.indexes) == 0: raise(Exception) key", "< len(self.indexes): # dont heapify if deleted last element self.heapifyDown(i=temp[1])", "' ' i += 1 if index != 0 and", "j): x = self.indexes[i] # key of 1 item y", "- 1 while self.hasParent(index) and self.bigger(self.parentIndex(index), index): self.swap(self.parentIndex(index), index) index", "raise(Exception) self.items[key] = (data, temp[1]) def delete(self, key): temp =", "return int(i*2 + 1) def hasRight(self, i): if i*2 +", "i else: index = len(self.indexes) - 1 while self.hasParent(index) and", "str(self.indexes[self.parentIndex(i)]) out += ']' if index == lineLen: out +=", "in index array self.indexes[i] = y self.indexes[j] = x temp", "1 << height index += 1 key = self.indexes[i] out", "'min': heap.min() elif cmd[0] == 'max': heap.max() elif cmd[0] ==", "'': continue if len(cmd) == 2 and cmd[0] == ''", "'': continue if cmd[0] == 'add': heap.add(int(cmd[1]), cmd[2]) elif cmd[0]", "key of last item to deleted index self.indexes[temp[1]] = lastKey", "know indexes # Usefull functions def swap(self, i, j): x", "+ ' ' + self.items[key][0] if height != 0: out", "if deleted last element self.heapifyDown(i=temp[1]) self.heapifyUp(i=temp[1]) def search(self, key): temp", "self.items = dict() # key - (value, index) self.indexes =", "family UwU def hasParent(self, i): if (i - 1)/2 >=", "maxKey < self.indexes[i]: maxKey = self.indexes[i] index = i i", "return True return False def parentIndex(self, i): return int((i -", "# key of 2 item # swap keys in index", "i = 0 if len(self.indexes) == 0: out += '_\\n'", "rootData) def print(self): height = 0 index = 0 out", "self.indexes[j]: return False else: return True # Check family UwU", "dont heapify if deleted 
last element self.heapifyDown(i=temp[1]) self.heapifyUp(i=temp[1]) def search(self,", "1 print(maxKey, index, self.items[maxKey][0]) def extract(self): if len(self.indexes) == 0:", "0 out = '' i = 0 if len(self.indexes) ==", "self.indexes = [] # index - key // to know", "(data, int(len(self.indexes))) self.indexes.append(key) self.heapifyUp() def set(self, key, data): temp =", "if height != 0: out += ' ' + str(self.indexes[self.parentIndex(i)])", "else: print(out, end='') cycle = True heap = Heap() while", "None) # set last item index of deleted self.items.update({lastKey: (last[0],", "cmd[1] == '': continue if cmd[0] == 'add': heap.add(int(cmd[1]), cmd[2])", "item y = self.indexes[j] # key of 2 item #", "+ 1 < len(self.indexes): return True return False def leftIndex(self,", "== 2 and cmd[0] == '' and cmd[1] == '':", "'\\n' index = 0 height += 1 else: out +=", "temp[0]) else: print('0') def min(self): if len(self.indexes) == 0: raise(Exception)", "cmd = line.split(' ', 2) try: if len(cmd) == 1", "False def parentIndex(self, i): return int((i - 1)/2) def hasLeft(self,", "= self.items.get(key, None) if temp: print('1', temp[1], temp[0]) else: print('0')", "int(i*2 + 2) # heapifys def heapifyUp(self, i=None): if i:", "'_ ' * (lineLen - index) print(out[0:-1]) else: print(out, end='')", "del self.items[key] if temp[1] < len(self.indexes): # dont heapify if", "<= self.indexes[j]: return False else: return True # Check family", "of 1 item # swap indexes in dictionary self.items.update({x: (self.items[x][0],", "if index == lineLen: out += '\\n' index = 0", "i < len(self.indexes): lineLen = 1 << height index +=", "self.indexes[i] index = i while i < len(self.indexes): if maxKey", "= input() cmd = line.split(' ', 2) try: if len(cmd)", "== 'print': heap.print() else: raise(Exception) except Exception: print('error') continue except", "def print(self): height = 0 index = 0 out =", "= len(self.indexes) - 1 while self.hasParent(index) and self.bigger(self.parentIndex(index), 
index): self.swap(self.parentIndex(index),", "set key of last item to deleted index self.indexes[temp[1]] =", "False def leftIndex(self, i): return int(i*2 + 1) def hasRight(self,", "item to deleted index self.indexes[temp[1]] = lastKey self.indexes.pop() del self.items[key]", "heap.add(int(cmd[1]), cmd[2]) elif cmd[0] == 'set': heap.set(int(cmd[1]), cmd[2]) elif cmd[0]", "all needed methods def add(self, key, data): if self.items.get(key, None):", "temp[1], temp[0]) else: print('0') def min(self): if len(self.indexes) == 0:", "def max(self): if len(self.indexes) == 0: raise(Exception) i = int(len(self.indexes)/2)", "self.leftIndex(index) if self.hasRight(index) and self.bigger(self.leftIndex(index), self.rightIndex(index)): smaller = self.rightIndex(index) if", "== 0: out += '_\\n' print('_') return while i <", "0 if len(self.indexes) == 0: out += '_\\n' print('_') return", "index == lineLen: out += '\\n' index = 0 height", "of deleted self.items.update({lastKey: (last[0], temp[1])}) # set key of last", "def add(self, key, data): if self.items.get(key, None): raise(Exception) self.items[key] =", "= self.items.get(lastKey, None) # set last item index of deleted", "set last item index of deleted self.items.update({lastKey: (last[0], temp[1])}) #", "= y self.indexes[j] = x temp = self.items[x][1] # index", "!= 0 and index < lineLen: out += '_ '", "i while i < len(self.indexes): if maxKey < self.indexes[i]: maxKey", "- index) print(out[0:-1]) else: print(out, end='') cycle = True heap", "index): self.swap(self.parentIndex(index), index) index = self.parentIndex(index) def heapifyDown(self, i=0): index", "+ str(key) + ' ' + self.items[key][0] if height !=", "'extract': heap.extract() elif cmd[0] == 'print': heap.print() else: raise(Exception) except", "< len(self.indexes): return True return False def rightIndex(self, i): return", "= self.indexes.pop() # set top item index to 0 self.items.update({self.indexes[0]", "def rightIndex(self, i): return int(i*2 + 2) # 
heapifys def", "< lineLen: out += '_ ' * (lineLen - index)", "of last item to deleted index self.indexes[temp[1]] = lastKey self.indexes.pop()", "+= '\\n' index = 0 height += 1 else: out", "print('_') return while i < len(self.indexes): lineLen = 1 <<", "break else: self.swap(index, smaller) index = smaller # all needed", "keys in index array self.indexes[i] = y self.indexes[j] = x", "def hasParent(self, i): if (i - 1)/2 >= 0: return", "else: index = len(self.indexes) - 1 while self.hasParent(index) and self.bigger(self.parentIndex(index),", "if len(self.indexes) == 0: out += '_\\n' print('_') return while", "out += '\\n' index = 0 height += 1 else:", "= smaller # all needed methods def add(self, key, data):", "self.bigger(smaller, index): break else: self.swap(index, smaller) index = smaller #", "= 0 out = '' i = 0 if len(self.indexes)", "top item index to 0 self.items.update({self.indexes[0] : (self.items[self.indexes[0]][0], 0)}) self.heapifyDown()", "index = i i += 1 print(maxKey, index, self.items[maxKey][0]) def", "= lastKey self.indexes.pop() del self.items[key] if temp[1] < len(self.indexes): #", "len(cmd) == 2 and cmd[0] == '' and cmd[1] ==", "if len(cmd) == 1 and cmd[0] == '': continue if", "len(self.indexes): return True return False def rightIndex(self, i): return int(i*2", "2 and cmd[0] == '' and cmd[1] == '': continue", "' ' + str(self.indexes[self.parentIndex(i)]) out += ']' if index ==", "index = i else: index = len(self.indexes) - 1 while", "(self.items[x][0], self.items[y][1])}) self.items.update({y: (self.items[y][0], temp)}) def bigger(self, i, j): if", "else: out += ' ' i += 1 if index", "heapifyUp(self, i=None): if i: index = i else: index =", "+= ' ' i += 1 if index != 0", "- key // to know indexes # Usefull functions def", "i while self.hasLeft(index): smaller = self.leftIndex(index) if self.hasRight(index) and self.bigger(self.leftIndex(index),", "# set last item index of deleted self.items.update({lastKey: (last[0], temp[1])})", "x temp = 
self.items[x][1] # index of 1 item #", "index = i while i < len(self.indexes): if maxKey <", "print(maxKey, index, self.items[maxKey][0]) def extract(self): if len(self.indexes) == 0: raise(Exception)", "if len(self.indexes) > 1: self.indexes[0] = self.indexes.pop() # set top", "'search': heap.search(int(cmd[1])) elif cmd[0] == 'min': heap.min() elif cmd[0] ==", "# swap keys in index array self.indexes[i] = y self.indexes[j]", "= 0 height += 1 else: out += ' '", "= x temp = self.items[x][1] # index of 1 item", "self.items.update({x: (self.items[x][0], self.items[y][1])}) self.items.update({y: (self.items[y][0], temp)}) def bigger(self, i, j):", "needed methods def add(self, key, data): if self.items.get(key, None): raise(Exception)", "raise(Exception) except Exception: print('error') continue except Exception: cycle = False", "methods def add(self, key, data): if self.items.get(key, None): raise(Exception) self.items[key]", "item # swap indexes in dictionary self.items.update({x: (self.items[x][0], self.items[y][1])}) self.items.update({y:", "1: lastKey = self.indexes[-1] last = self.items.get(lastKey, None) # set", "if len(self.indexes) == 0: raise(Exception) i = int(len(self.indexes)/2) maxKey =", "= Heap() while cycle: try: line = input() cmd =", "len(cmd) == 1 and cmd[0] == '': continue if len(cmd)", "# set top item index to 0 self.items.update({self.indexes[0] : (self.items[self.indexes[0]][0],", "lineLen = 1 << height index += 1 key =", "= self.items.get(key, None) if not temp: raise(Exception) self.items[key] = (data,", "False else: return True # Check family UwU def hasParent(self,", "len(self.indexes) == 0: raise(Exception) rootKey = self.indexes[0] rootData = self.items[rootKey][0]", "return True return False def rightIndex(self, i): return int(i*2 +", "temp[1])}) # set key of last item to deleted index", "']' if index == lineLen: out += '\\n' index =", "smaller = self.leftIndex(index) if self.hasRight(index) and self.bigger(self.leftIndex(index), 
self.rightIndex(index)): smaller =", "key = self.indexes[0] print(key, '0', self.items[key][0]) def max(self): if len(self.indexes)", "- 1)/2) def hasLeft(self, i): if i*2 + 1 <", "print(out[0:-1]) else: print(out, end='') cycle = True heap = Heap()", "return False def leftIndex(self, i): return int(i*2 + 1) def", "if self.indexes[i] <= self.indexes[j]: return False else: return True #", "= self.indexes[i] index = i while i < len(self.indexes): if", "class Heap: def __init__(self): self.items = dict() # key -", "> 1: self.indexes[0] = self.indexes.pop() # set top item index", "heapifyDown(self, i=0): index = i while self.hasLeft(index): smaller = self.leftIndex(index)", "functions def swap(self, i, j): x = self.indexes[i] # key", "to 0 self.items.update({self.indexes[0] : (self.items[self.indexes[0]][0], 0)}) self.heapifyDown() else: self.indexes.pop() print(rootKey,", "- 1)/2 >= 0: return True return False def parentIndex(self,", "index = 0 height += 1 else: out += '", "if temp[1] < len(self.indexes): # dont heapify if deleted last", "self.indexes[i] = y self.indexes[j] = x temp = self.items[x][1] #", "2 < len(self.indexes): return True return False def rightIndex(self, i):", "== 'max': heap.max() elif cmd[0] == 'extract': heap.extract() elif cmd[0]", "None) if temp: print('1', temp[1], temp[0]) else: print('0') def min(self):", "and cmd[1] == '': continue if cmd[0] == 'add': heap.add(int(cmd[1]),", "hasLeft(self, i): if i*2 + 1 < len(self.indexes): return True", "= (data, int(len(self.indexes))) self.indexes.append(key) self.heapifyUp() def set(self, key, data): temp", "temp)}) def bigger(self, i, j): if self.indexes[i] <= self.indexes[j]: return", "'print': heap.print() else: raise(Exception) except Exception: print('error') continue except Exception:", "i): return int((i - 1)/2) def hasLeft(self, i): if i*2", "self.heapifyUp() def set(self, key, data): temp = self.items.get(key, None) if", "lineLen: out += '_ ' * (lineLen - index) print(out[0:-1])", 
"<gh_stars>0 class Heap: def __init__(self): self.items = dict() # key", "(self.items[self.indexes[0]][0], 0)}) self.heapifyDown() else: self.indexes.pop() print(rootKey, rootData) def print(self): height", "if (i - 1)/2 >= 0: return True return False", "i): return int(i*2 + 2) # heapifys def heapifyUp(self, i=None):", "None) if not temp: raise(Exception) self.items[key] = (data, temp[1]) def", "= 0 if len(self.indexes) == 0: out += '_\\n' print('_')", "+ self.items[key][0] if height != 0: out += ' '", "j): if self.indexes[i] <= self.indexes[j]: return False else: return True", "out += ' ' i += 1 if index !=", "i < len(self.indexes): if maxKey < self.indexes[i]: maxKey = self.indexes[i]", "index = i while self.hasLeft(index): smaller = self.leftIndex(index) if self.hasRight(index)", "self.heapifyDown(i=temp[1]) self.heapifyUp(i=temp[1]) def search(self, key): temp = self.items.get(key, None) if", "index, self.items[maxKey][0]) def extract(self): if len(self.indexes) == 0: raise(Exception) rootKey", "= dict() # key - (value, index) self.indexes = []", "== 'set': heap.set(int(cmd[1]), cmd[2]) elif cmd[0] == 'delete': heap.delete(int(cmd[1])) elif", "= self.parentIndex(index) def heapifyDown(self, i=0): index = i while self.hasLeft(index):", "self.indexes[0] rootData = self.items[rootKey][0] del self.items[rootKey] if len(self.indexes) > 1:", "def bigger(self, i, j): if self.indexes[i] <= self.indexes[j]: return False", "self.items.get(key, None) if not temp: raise(Exception) self.items[key] = (data, temp[1])", "0: raise(Exception) i = int(len(self.indexes)/2) maxKey = self.indexes[i] index =", "item index to 0 self.items.update({self.indexes[0] : (self.items[self.indexes[0]][0], 0)}) self.heapifyDown() else:", "temp = self.items.get(key, None) if not temp: raise(Exception) self.items[key] =", "len(self.indexes): lineLen = 1 << height index += 1 key", "and self.bigger(self.parentIndex(index), index): self.swap(self.parentIndex(index), index) index = 
self.parentIndex(index) def heapifyDown(self,", "def delete(self, key): temp = self.items.get(key, None) if not temp:", "index < lineLen: out += '_ ' * (lineLen -", "raise(Exception) rootKey = self.indexes[0] rootData = self.items[rootKey][0] del self.items[rootKey] if", "= self.indexes[0] rootData = self.items[rootKey][0] del self.items[rootKey] if len(self.indexes) >", "== 'add': heap.add(int(cmd[1]), cmd[2]) elif cmd[0] == 'set': heap.set(int(cmd[1]), cmd[2])", "y self.indexes[j] = x temp = self.items[x][1] # index of", "self.items[y][1])}) self.items.update({y: (self.items[y][0], temp)}) def bigger(self, i, j): if self.indexes[i]", "heapify if deleted last element self.heapifyDown(i=temp[1]) self.heapifyUp(i=temp[1]) def search(self, key):", "return False def rightIndex(self, i): return int(i*2 + 2) #", "key = self.indexes[i] out += '[' + str(key) + '", "def extract(self): if len(self.indexes) == 0: raise(Exception) rootKey = self.indexes[0]", "i): if i*2 + 2 < len(self.indexes): return True return", "maxKey = self.indexes[i] index = i while i < len(self.indexes):", "== 0: raise(Exception) key = self.indexes[0] print(key, '0', self.items[key][0]) def", "temp = self.items.get(key, None) if not temp: raise(Exception) if len(self.indexes)", "self.items.update({self.indexes[0] : (self.items[self.indexes[0]][0], 0)}) self.heapifyDown() else: self.indexes.pop() print(rootKey, rootData) def", "swap indexes in dictionary self.items.update({x: (self.items[x][0], self.items[y][1])}) self.items.update({y: (self.items[y][0], temp)})", "dictionary self.items.update({x: (self.items[x][0], self.items[y][1])}) self.items.update({y: (self.items[y][0], temp)}) def bigger(self, i,", "+= 1 print(maxKey, index, self.items[maxKey][0]) def extract(self): if len(self.indexes) ==", "return True # Check family UwU def hasParent(self, i): if", "to know indexes # Usefull functions def swap(self, i, j):", "index) print(out[0:-1]) else: print(out, end='') cycle = True heap =", "+= '_ ' * 
(lineLen - index) print(out[0:-1]) else: print(out,", "= self.items[x][1] # index of 1 item # swap indexes", "out += '[' + str(key) + ' ' + self.items[key][0]", "(last[0], temp[1])}) # set key of last item to deleted", "len(self.indexes): return True return False def leftIndex(self, i): return int(i*2", "set top item index to 0 self.items.update({self.indexes[0] : (self.items[self.indexes[0]][0], 0)})", "!= 0: out += ' ' + str(self.indexes[self.parentIndex(i)]) out +=", "# set key of last item to deleted index self.indexes[temp[1]]", "def swap(self, i, j): x = self.indexes[i] # key of", "if index != 0 and index < lineLen: out +=", "self.indexes.pop() # set top item index to 0 self.items.update({self.indexes[0] :", "line.split(' ', 2) try: if len(cmd) == 1 and cmd[0]", "key): temp = self.items.get(key, None) if not temp: raise(Exception) if", "cmd[0] == '' and cmd[1] == '': continue if cmd[0]", "'delete': heap.delete(int(cmd[1])) elif cmd[0] == 'search': heap.search(int(cmd[1])) elif cmd[0] ==", "index): break else: self.swap(index, smaller) index = smaller # all", "self.indexes[i] index = i i += 1 print(maxKey, index, self.items[maxKey][0])", ">= 0: return True return False def parentIndex(self, i): return", "Heap() while cycle: try: line = input() cmd = line.split('", "cmd[2]) elif cmd[0] == 'set': heap.set(int(cmd[1]), cmd[2]) elif cmd[0] ==", "return False else: return True # Check family UwU def", "try: if len(cmd) == 1 and cmd[0] == '': continue", "print(rootKey, rootData) def print(self): height = 0 index = 0", "self.items[key] = (data, temp[1]) def delete(self, key): temp = self.items.get(key,", "if i*2 + 2 < len(self.indexes): return True return False", "i, j): if self.indexes[i] <= self.indexes[j]: return False else: return", "(i - 1)/2 >= 0: return True return False def", "cmd[0] == 'delete': heap.delete(int(cmd[1])) elif cmd[0] == 'search': heap.search(int(cmd[1])) elif", "1)/2) def hasLeft(self, i): if i*2 + 1 < len(self.indexes):", "i): return int(i*2 
+ 1) def hasRight(self, i): if i*2", "heap = Heap() while cycle: try: line = input() cmd", "elif cmd[0] == 'max': heap.max() elif cmd[0] == 'extract': heap.extract()", "item index of deleted self.items.update({lastKey: (last[0], temp[1])}) # set key", "height != 0: out += ' ' + str(self.indexes[self.parentIndex(i)]) out", "height = 0 index = 0 out = '' i", "if cmd[0] == 'add': heap.add(int(cmd[1]), cmd[2]) elif cmd[0] == 'set':", "' ' + self.items[key][0] if height != 0: out +=", "out += ' ' + str(self.indexes[self.parentIndex(i)]) out += ']' if", "try: line = input() cmd = line.split(' ', 2) try:", "index) index = self.parentIndex(index) def heapifyDown(self, i=0): index = i", "self.items.get(key, None): raise(Exception) self.items[key] = (data, int(len(self.indexes))) self.indexes.append(key) self.heapifyUp() def", "1 item # swap indexes in dictionary self.items.update({x: (self.items[x][0], self.items[y][1])})", "1 if index != 0 and index < lineLen: out", "2 item # swap keys in index array self.indexes[i] =", "self.hasLeft(index): smaller = self.leftIndex(index) if self.hasRight(index) and self.bigger(self.leftIndex(index), self.rightIndex(index)): smaller", "len(self.indexes) > 1: lastKey = self.indexes[-1] last = self.items.get(lastKey, None)", "0: return True return False def parentIndex(self, i): return int((i", "= i else: index = len(self.indexes) - 1 while self.hasParent(index)", "str(key) + ' ' + self.items[key][0] if height != 0:", "1 else: out += ' ' i += 1 if", "heap.print() else: raise(Exception) except Exception: print('error') continue except Exception: cycle", "+= 1 if index != 0 and index < lineLen:", "smaller) index = smaller # all needed methods def add(self,", "print('0') def min(self): if len(self.indexes) == 0: raise(Exception) key =", "self.rightIndex(index)): smaller = self.rightIndex(index) if self.bigger(smaller, index): break else: self.swap(index,", "data): temp = self.items.get(key, None) if not temp: raise(Exception) self.items[key]", 
"self.indexes[i] <= self.indexes[j]: return False else: return True # Check", "True heap = Heap() while cycle: try: line = input()", "continue if len(cmd) == 2 and cmd[0] == '' and", "' i += 1 if index != 0 and index", "rootData = self.items[rootKey][0] del self.items[rootKey] if len(self.indexes) > 1: self.indexes[0]", "0: raise(Exception) rootKey = self.indexes[0] rootData = self.items[rootKey][0] del self.items[rootKey]", "self.indexes[-1] last = self.items.get(lastKey, None) # set last item index", "key, data): if self.items.get(key, None): raise(Exception) self.items[key] = (data, int(len(self.indexes)))", "def min(self): if len(self.indexes) == 0: raise(Exception) key = self.indexes[0]", "# Usefull functions def swap(self, i, j): x = self.indexes[i]", "maxKey = self.indexes[i] index = i i += 1 print(maxKey,", "= self.indexes[i] index = i i += 1 print(maxKey, index,", "True return False def parentIndex(self, i): return int((i - 1)/2)", "i = int(len(self.indexes)/2) maxKey = self.indexes[i] index = i while", "last item to deleted index self.indexes[temp[1]] = lastKey self.indexes.pop() del", "== 0: raise(Exception) rootKey = self.indexes[0] rootData = self.items[rootKey][0] del", "== 'extract': heap.extract() elif cmd[0] == 'print': heap.print() else: raise(Exception)", "'' i = 0 if len(self.indexes) == 0: out +=", "self.indexes[0] print(key, '0', self.items[key][0]) def max(self): if len(self.indexes) == 0:", "else: self.swap(index, smaller) index = smaller # all needed methods", "= self.indexes[i] # key of 1 item y = self.indexes[j]", "0 self.items.update({self.indexes[0] : (self.items[self.indexes[0]][0], 0)}) self.heapifyDown() else: self.indexes.pop() print(rootKey, rootData)", "[] # index - key // to know indexes #", "self.items[key][0]) def max(self): if len(self.indexes) == 0: raise(Exception) i =", "= i while i < len(self.indexes): if maxKey < self.indexes[i]:", "'0', self.items[key][0]) def max(self): if len(self.indexes) == 0: raise(Exception) i", "# 
index - key // to know indexes # Usefull", "temp[1]) def delete(self, key): temp = self.items.get(key, None) if not", "int(len(self.indexes)/2) maxKey = self.indexes[i] index = i while i <", "if self.bigger(smaller, index): break else: self.swap(index, smaller) index = smaller", "def set(self, key, data): temp = self.items.get(key, None) if not", "def heapifyUp(self, i=None): if i: index = i else: index", "# heapifys def heapifyUp(self, i=None): if i: index = i", "raise(Exception) self.items[key] = (data, int(len(self.indexes))) self.indexes.append(key) self.heapifyUp() def set(self, key,", "i*2 + 1 < len(self.indexes): return True return False def", "# dont heapify if deleted last element self.heapifyDown(i=temp[1]) self.heapifyUp(i=temp[1]) def", "if len(self.indexes) > 1: lastKey = self.indexes[-1] last = self.items.get(lastKey,", "def parentIndex(self, i): return int((i - 1)/2) def hasLeft(self, i):", "= self.leftIndex(index) if self.hasRight(index) and self.bigger(self.leftIndex(index), self.rightIndex(index)): smaller = self.rightIndex(index)", "self.indexes[temp[1]] = lastKey self.indexes.pop() del self.items[key] if temp[1] < len(self.indexes):", "swap keys in index array self.indexes[i] = y self.indexes[j] =", "self.rightIndex(index) if self.bigger(smaller, index): break else: self.swap(index, smaller) index =", "and index < lineLen: out += '_ ' * (lineLen", "== '' and cmd[1] == '': continue if cmd[0] ==", "indexes in dictionary self.items.update({x: (self.items[x][0], self.items[y][1])}) self.items.update({y: (self.items[y][0], temp)}) def", "set(self, key, data): temp = self.items.get(key, None) if not temp:", "if self.items.get(key, None): raise(Exception) self.items[key] = (data, int(len(self.indexes))) self.indexes.append(key) self.heapifyUp()", "print(key, '0', self.items[key][0]) def max(self): if len(self.indexes) == 0: raise(Exception)", "rightIndex(self, i): return int(i*2 + 2) # heapifys def heapifyUp(self,", "len(self.indexes) > 1: 
self.indexes[0] = self.indexes.pop() # set top item", "self.items[key] = (data, int(len(self.indexes))) self.indexes.append(key) self.heapifyUp() def set(self, key, data):", "extract(self): if len(self.indexes) == 0: raise(Exception) rootKey = self.indexes[0] rootData", "= self.items[rootKey][0] del self.items[rootKey] if len(self.indexes) > 1: self.indexes[0] =", "2) # heapifys def heapifyUp(self, i=None): if i: index =", "heap.min() elif cmd[0] == 'max': heap.max() elif cmd[0] == 'extract':", "search(self, key): temp = self.items.get(key, None) if temp: print('1', temp[1],", "= self.indexes[i] out += '[' + str(key) + ' '", "0)}) self.heapifyDown() else: self.indexes.pop() print(rootKey, rootData) def print(self): height =", "and cmd[0] == '': continue if len(cmd) == 2 and", "height index += 1 key = self.indexes[i] out += '['", "elif cmd[0] == 'print': heap.print() else: raise(Exception) except Exception: print('error')", "index - key // to know indexes # Usefull functions", "def leftIndex(self, i): return int(i*2 + 1) def hasRight(self, i):", "i=0): index = i while self.hasLeft(index): smaller = self.leftIndex(index) if", "< len(self.indexes): lineLen = 1 << height index += 1", "of 2 item # swap keys in index array self.indexes[i]", "last = self.items.get(lastKey, None) # set last item index of", "len(self.indexes) == 0: out += '_\\n' print('_') return while i", "0: out += '_\\n' print('_') return while i < len(self.indexes):", "elif cmd[0] == 'set': heap.set(int(cmd[1]), cmd[2]) elif cmd[0] == 'delete':", "key of 2 item # swap keys in index array", "(data, temp[1]) def delete(self, key): temp = self.items.get(key, None) if", "2) try: if len(cmd) == 1 and cmd[0] == '':", "deleted index self.indexes[temp[1]] = lastKey self.indexes.pop() del self.items[key] if temp[1]", "elif cmd[0] == 'search': heap.search(int(cmd[1])) elif cmd[0] == 'min': heap.min()", "of 1 item y = self.indexes[j] # key of 2", "swap(self, i, j): x = self.indexes[i] # key of 1", "def 
search(self, key): temp = self.items.get(key, None) if temp: print('1',", "while cycle: try: line = input() cmd = line.split(' ',", "> 1: lastKey = self.indexes[-1] last = self.items.get(lastKey, None) #", "= i while self.hasLeft(index): smaller = self.leftIndex(index) if self.hasRight(index) and", "+= ']' if index == lineLen: out += '\\n' index", "== lineLen: out += '\\n' index = 0 height +=", "elif cmd[0] == 'delete': heap.delete(int(cmd[1])) elif cmd[0] == 'search': heap.search(int(cmd[1]))", "Check family UwU def hasParent(self, i): if (i - 1)/2", "' + self.items[key][0] if height != 0: out += '", "i += 1 print(maxKey, index, self.items[maxKey][0]) def extract(self): if len(self.indexes)", "key // to know indexes # Usefull functions def swap(self,", "and self.bigger(self.leftIndex(index), self.rightIndex(index)): smaller = self.rightIndex(index) if self.bigger(smaller, index): break", "while i < len(self.indexes): if maxKey < self.indexes[i]: maxKey =", "# all needed methods def add(self, key, data): if self.items.get(key,", "min(self): if len(self.indexes) == 0: raise(Exception) key = self.indexes[0] print(key,", "else: return True # Check family UwU def hasParent(self, i):", "return False def parentIndex(self, i): return int((i - 1)/2) def", "0 height += 1 else: out += ' ' i", "while i < len(self.indexes): lineLen = 1 << height index", "# key - (value, index) self.indexes = [] # index", "heap.extract() elif cmd[0] == 'print': heap.print() else: raise(Exception) except Exception:", "True return False def rightIndex(self, i): return int(i*2 + 2)", "# key of 1 item y = self.indexes[j] # key", "self.indexes[i] out += '[' + str(key) + ' ' +", "return int((i - 1)/2) def hasLeft(self, i): if i*2 +", "elif cmd[0] == 'min': heap.min() elif cmd[0] == 'max': heap.max()", "'[' + str(key) + ' ' + self.items[key][0] if height", "== 'min': heap.min() elif cmd[0] == 'max': heap.max() elif cmd[0]", "self.items[rootKey] if len(self.indexes) > 1: self.indexes[0] = 
self.indexes.pop() # set", "False def rightIndex(self, i): return int(i*2 + 2) # heapifys", "key of 1 item y = self.indexes[j] # key of", "int((i - 1)/2) def hasLeft(self, i): if i*2 + 1", "x = self.indexes[i] # key of 1 item y =", "# swap indexes in dictionary self.items.update({x: (self.items[x][0], self.items[y][1])}) self.items.update({y: (self.items[y][0],", "add(self, key, data): if self.items.get(key, None): raise(Exception) self.items[key] = (data,", "parentIndex(self, i): return int((i - 1)/2) def hasLeft(self, i): if", "return True return False def leftIndex(self, i): return int(i*2 +", "len(self.indexes) == 0: raise(Exception) key = self.indexes[0] print(key, '0', self.items[key][0])", "self.items[rootKey][0] del self.items[rootKey] if len(self.indexes) > 1: self.indexes[0] = self.indexes.pop()", "1 < len(self.indexes): return True return False def leftIndex(self, i):", "cmd[0] == 'set': heap.set(int(cmd[1]), cmd[2]) elif cmd[0] == 'delete': heap.delete(int(cmd[1]))", "== '': continue if cmd[0] == 'add': heap.add(int(cmd[1]), cmd[2]) elif", "if len(self.indexes) == 0: raise(Exception) rootKey = self.indexes[0] rootData =", "len(self.indexes) - 1 while self.hasParent(index) and self.bigger(self.parentIndex(index), index): self.swap(self.parentIndex(index), index)", "UwU def hasParent(self, i): if (i - 1)/2 >= 0:", "def hasLeft(self, i): if i*2 + 1 < len(self.indexes): return", "i i += 1 print(maxKey, index, self.items[maxKey][0]) def extract(self): if", "raise(Exception) i = int(len(self.indexes)/2) maxKey = self.indexes[i] index = i", "lastKey = self.indexes[-1] last = self.items.get(lastKey, None) # set last", "temp = self.items[x][1] # index of 1 item # swap", "self.heapifyDown() else: self.indexes.pop() print(rootKey, rootData) def print(self): height = 0", "self.items.update({lastKey: (last[0], temp[1])}) # set key of last item to", "rootKey = self.indexes[0] rootData = self.items[rootKey][0] del self.items[rootKey] if len(self.indexes)", "= 
self.indexes[j] # key of 2 item # swap keys", "deleted self.items.update({lastKey: (last[0], temp[1])}) # set key of last item", "if i*2 + 1 < len(self.indexes): return True return False", "self.indexes[i]: maxKey = self.indexes[i] index = i i += 1", "out += ']' if index == lineLen: out += '\\n'", "+= 1 else: out += ' ' i += 1", "print(self): height = 0 index = 0 out = ''", "indexes # Usefull functions def swap(self, i, j): x =", "continue if cmd[0] == 'add': heap.add(int(cmd[1]), cmd[2]) elif cmd[0] ==", "self.indexes[0] = self.indexes.pop() # set top item index to 0", "(lineLen - index) print(out[0:-1]) else: print(out, end='') cycle = True", "1 key = self.indexes[i] out += '[' + str(key) +", "i): if (i - 1)/2 >= 0: return True return", "// to know indexes # Usefull functions def swap(self, i,", "self.items[key] if temp[1] < len(self.indexes): # dont heapify if deleted", "raise(Exception) key = self.indexes[0] print(key, '0', self.items[key][0]) def max(self): if", "'set': heap.set(int(cmd[1]), cmd[2]) elif cmd[0] == 'delete': heap.delete(int(cmd[1])) elif cmd[0]", "0: out += ' ' + str(self.indexes[self.parentIndex(i)]) out += ']'", "1 item y = self.indexes[j] # key of 2 item", "self.hasParent(index) and self.bigger(self.parentIndex(index), index): self.swap(self.parentIndex(index), index) index = self.parentIndex(index) def", "not temp: raise(Exception) self.items[key] = (data, temp[1]) def delete(self, key):", "key): temp = self.items.get(key, None) if temp: print('1', temp[1], temp[0])", "'add': heap.add(int(cmd[1]), cmd[2]) elif cmd[0] == 'set': heap.set(int(cmd[1]), cmd[2]) elif", "self.items[maxKey][0]) def extract(self): if len(self.indexes) == 0: raise(Exception) rootKey =", "deleted last element self.heapifyDown(i=temp[1]) self.heapifyUp(i=temp[1]) def search(self, key): temp =", "index of deleted self.items.update({lastKey: (last[0], temp[1])}) # set key of", "self.bigger(self.leftIndex(index), self.rightIndex(index)): smaller = self.rightIndex(index) 
if self.bigger(smaller, index): break else:", "if maxKey < self.indexes[i]: maxKey = self.indexes[i] index = i", "= i i += 1 print(maxKey, index, self.items[maxKey][0]) def extract(self):", "if temp: print('1', temp[1], temp[0]) else: print('0') def min(self): if", "1: self.indexes[0] = self.indexes.pop() # set top item index to", "end='') cycle = True heap = Heap() while cycle: try:", "self.swap(index, smaller) index = smaller # all needed methods def", "self.indexes.append(key) self.heapifyUp() def set(self, key, data): temp = self.items.get(key, None)", "<< height index += 1 key = self.indexes[i] out +=", "smaller = self.rightIndex(index) if self.bigger(smaller, index): break else: self.swap(index, smaller)", "Heap: def __init__(self): self.items = dict() # key - (value,", "del self.items[rootKey] if len(self.indexes) > 1: self.indexes[0] = self.indexes.pop() #", "in dictionary self.items.update({x: (self.items[x][0], self.items[y][1])}) self.items.update({y: (self.items[y][0], temp)}) def bigger(self,", "out = '' i = 0 if len(self.indexes) == 0:", "= 0 index = 0 out = '' i =", "== 'delete': heap.delete(int(cmd[1])) elif cmd[0] == 'search': heap.search(int(cmd[1])) elif cmd[0]", "temp[1] < len(self.indexes): # dont heapify if deleted last element", "array self.indexes[i] = y self.indexes[j] = x temp = self.items[x][1]", "= self.rightIndex(index) if self.bigger(smaller, index): break else: self.swap(index, smaller) index", "self.items.get(key, None) if not temp: raise(Exception) if len(self.indexes) > 1:", "i: index = i else: index = len(self.indexes) - 1", "else: raise(Exception) except Exception: print('error') continue except Exception: cycle =", "0 and index < lineLen: out += '_ ' *", "len(self.indexes) == 0: raise(Exception) i = int(len(self.indexes)/2) maxKey = self.indexes[i]", "self.items.get(key, None) if temp: print('1', temp[1], temp[0]) else: print('0') def", "smaller # all needed methods def add(self, key, data): if", "and cmd[0] == '' and cmd[1] == 
'': continue if", "self.items[key][0] if height != 0: out += ' ' +", "= 1 << height index += 1 key = self.indexes[i]", "index of 1 item # swap indexes in dictionary self.items.update({x:", "cmd[0] == 'extract': heap.extract() elif cmd[0] == 'print': heap.print() else:", "input() cmd = line.split(' ', 2) try: if len(cmd) ==", "cmd[0] == 'max': heap.max() elif cmd[0] == 'extract': heap.extract() elif", "(value, index) self.indexes = [] # index - key //", "leftIndex(self, i): return int(i*2 + 1) def hasRight(self, i): if", "dict() # key - (value, index) self.indexes = [] #", "(self.items[y][0], temp)}) def bigger(self, i, j): if self.indexes[i] <= self.indexes[j]:", "if len(self.indexes) == 0: raise(Exception) key = self.indexes[0] print(key, '0',", "key - (value, index) self.indexes = [] # index -", "raise(Exception) if len(self.indexes) > 1: lastKey = self.indexes[-1] last =", "self.parentIndex(index) def heapifyDown(self, i=0): index = i while self.hasLeft(index): smaller", "def __init__(self): self.items = dict() # key - (value, index)", "while self.hasLeft(index): smaller = self.leftIndex(index) if self.hasRight(index) and self.bigger(self.leftIndex(index), self.rightIndex(index)):", "= True heap = Heap() while cycle: try: line =", "'max': heap.max() elif cmd[0] == 'extract': heap.extract() elif cmd[0] ==", "element self.heapifyDown(i=temp[1]) self.heapifyUp(i=temp[1]) def search(self, key): temp = self.items.get(key, None)", "temp: print('1', temp[1], temp[0]) else: print('0') def min(self): if len(self.indexes)", "i += 1 if index != 0 and index <", "cmd[0] == 'print': heap.print() else: raise(Exception) except Exception: print('error') continue", "return int(i*2 + 2) # heapifys def heapifyUp(self, i=None): if", "+= 1 key = self.indexes[i] out += '[' + str(key)", "index = smaller # all needed methods def add(self, key,", "== 1 and cmd[0] == '': continue if len(cmd) ==", "< len(self.indexes): if maxKey < self.indexes[i]: maxKey = self.indexes[i] index", 
"self.indexes.pop() del self.items[key] if temp[1] < len(self.indexes): # dont heapify", "== '': continue if len(cmd) == 2 and cmd[0] ==", "cycle: try: line = input() cmd = line.split(' ', 2)", "key, data): temp = self.items.get(key, None) if not temp: raise(Exception)", "+ 1) def hasRight(self, i): if i*2 + 2 <", "# index of 1 item # swap indexes in dictionary", "self.indexes[j] # key of 2 item # swap keys in", "def hasRight(self, i): if i*2 + 2 < len(self.indexes): return", "lineLen: out += '\\n' index = 0 height += 1", "hasRight(self, i): if i*2 + 2 < len(self.indexes): return True", "'_\\n' print('_') return while i < len(self.indexes): lineLen = 1", "+= ' ' + str(self.indexes[self.parentIndex(i)]) out += ']' if index" ]
[ "= request_factory.post(reverse('comments-xtd-api-create'), data) if auth_user: force_authenticate(request, user=auth_user) view = CommentCreate.as_view()", "1, \"order\": 1, \"comment\": \"Es war einmal eine kleine...\", \"honeypot\":", "1, \"comment\": \"Es war einmal eine kleine...\", \"honeypot\": \"\"} data.update(self.form.initial)", "authenticated user, but the user has no mail address. self.user", "\"Bob\", \"email\": \"<EMAIL>\", \"followup\": True, \"reply_to\": 0, \"level\": 1, \"order\":", "= post_comment(data, auth_user=self.user) self.assertEqual(response.status_code, 400) self.assertTrue('name' in response.data) self.assertTrue('email' in", "setUp(self): patcher = patch('django_comments_xtd.views.send_mail') self.mock_mailer = patcher.start() self.article = Article.objects.create(", "APIRequestFactory() def post_comment(data, auth_user=None): request = request_factory.post(reverse('comments-xtd-api-create'), data) if auth_user:", "unicode_literals try: from unittest.mock import patch except ImportError: from mock", "import ContentType from django.test import TestCase from django.urls import reverse", "TestCase from django.urls import reverse from rest_framework.test import APIRequestFactory, force_authenticate", "unittest.mock import patch except ImportError: from mock import patch from", "data.update(self.form.initial) response = post_comment(data, auth_user=self.user) self.assertEqual(response.status_code, 400) self.assertTrue('name' in response.data)", "from django_comments_xtd.tests.models import Article, Diary request_factory = APIRequestFactory() def post_comment(data,", "\"\", \"email\": \"\", \"followup\": True, \"reply_to\": 0, \"level\": 1, \"order\":", "\"followup\": True, \"reply_to\": 0, \"level\": 1, \"order\": 1, \"comment\": \"Es", "User.objects.create_user(\"bob\", \"\", \"pwd\") data = {\"name\": \"\", \"email\": \"\", \"followup\":", "if auth_user: force_authenticate(request, user=auth_user) view = CommentCreate.as_view() return 
view(request) class", "response = post_comment(data, auth_user=self.user) self.assertEqual(response.status_code, 400) self.assertTrue('name' in response.data) self.assertTrue('email'", "__future__ import unicode_literals try: from unittest.mock import patch except ImportError:", "def setUp(self): patcher = patch('django_comments_xtd.views.send_mail') self.mock_mailer = patcher.start() self.article =", "mail address. self.user = User.objects.create_user(\"bob\", \"\", \"pwd\") data = {\"name\":", "self.article = Article.objects.create( title=\"October\", slug=\"october\", body=\"What I did on October...\")", "patcher = patch('django_comments_xtd.views.send_mail') self.mock_mailer = patcher.start() self.article = Article.objects.create( title=\"October\",", "from django_comments_xtd.api.views import CommentCreate from django_comments_xtd.tests.models import Article, Diary request_factory", "# It uses an authenticated user, but the user has", "django_comments_xtd import django_comments from django_comments_xtd.api.views import CommentCreate from django_comments_xtd.tests.models import", "CommentCreate from django_comments_xtd.tests.models import Article, Diary request_factory = APIRequestFactory() def", "patch('django_comments_xtd.views.send_mail') self.mock_mailer = patcher.start() self.article = Article.objects.create( title=\"October\", slug=\"october\", body=\"What", "class CommentCreateTestCase(TestCase): def setUp(self): patcher = patch('django_comments_xtd.views.send_mail') self.mock_mailer = patcher.start()", "from rest_framework.test import APIRequestFactory, force_authenticate from django_comments_xtd import django_comments from", "User from django.contrib.contenttypes.models import ContentType from django.test import TestCase from", "from unittest.mock import patch except ImportError: from mock import patch", "CommentCreateTestCase(TestCase): def setUp(self): patcher = patch('django_comments_xtd.views.send_mail') self.mock_mailer = patcher.start() 
self.article", "0, \"level\": 1, \"order\": 1, \"comment\": \"Es war einmal eine", "import User from django.contrib.contenttypes.models import ContentType from django.test import TestCase", "\"comment\": \"Es war einmal eine kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response", "import unicode_literals try: from unittest.mock import patch except ImportError: from", "from django.contrib.contenttypes.models import ContentType from django.test import TestCase from django.urls", "from django.test import TestCase from django.urls import reverse from rest_framework.test", "data = {\"name\": \"Bob\", \"email\": \"<EMAIL>\", \"followup\": True, \"reply_to\": 0,", "\"order\": 1, \"comment\": \"Es war einmal eine kleine...\", \"honeypot\": \"\"}", "slug=\"october\", body=\"What I did on October...\") self.form = django_comments.get_form()(self.article) def", "import patch except ImportError: from mock import patch from django.contrib.auth.models", "ContentType from django.test import TestCase from django.urls import reverse from", "test_post_returns_2xx_response(self): data = {\"name\": \"Bob\", \"email\": \"<EMAIL>\", \"followup\": True, \"reply_to\":", "test_post_returns_4xx_response(self): # It uses an authenticated user, but the user", "post_comment(data, auth_user=None): request = request_factory.post(reverse('comments-xtd-api-create'), data) if auth_user: force_authenticate(request, user=auth_user)", "view(request) class CommentCreateTestCase(TestCase): def setUp(self): patcher = patch('django_comments_xtd.views.send_mail') self.mock_mailer =", "view = CommentCreate.as_view() return view(request) class CommentCreateTestCase(TestCase): def setUp(self): patcher", "user, but the user has no mail address. 
self.user =", "eine kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data) self.assertEqual(response.status_code, 204)", "kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data) self.assertEqual(response.status_code, 204) self.assertEqual(self.mock_mailer.call_count,", "mock import patch from django.contrib.auth.models import User from django.contrib.contenttypes.models import", "{\"name\": \"Bob\", \"email\": \"<EMAIL>\", \"followup\": True, \"reply_to\": 0, \"level\": 1,", "uses an authenticated user, but the user has no mail", "patcher.start() self.article = Article.objects.create( title=\"October\", slug=\"october\", body=\"What I did on", "patch from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from", "reverse from rest_framework.test import APIRequestFactory, force_authenticate from django_comments_xtd import django_comments", "address. self.user = User.objects.create_user(\"bob\", \"\", \"pwd\") data = {\"name\": \"\",", "user has no mail address. 
self.user = User.objects.create_user(\"bob\", \"\", \"pwd\")", "\"email\": \"\", \"followup\": True, \"reply_to\": 0, \"level\": 1, \"order\": 1,", "war einmal eine kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data)", "CommentCreate.as_view() return view(request) class CommentCreateTestCase(TestCase): def setUp(self): patcher = patch('django_comments_xtd.views.send_mail')", "= {\"name\": \"\", \"email\": \"\", \"followup\": True, \"reply_to\": 0, \"level\":", "title=\"October\", slug=\"october\", body=\"What I did on October...\") self.form = django_comments.get_form()(self.article)", "= Article.objects.create( title=\"October\", slug=\"october\", body=\"What I did on October...\") self.form", "It uses an authenticated user, but the user has no", "self.assertEqual(self.mock_mailer.call_count, 1) def test_post_returns_4xx_response(self): # It uses an authenticated user,", "post_comment(data) self.assertEqual(response.status_code, 204) self.assertEqual(self.mock_mailer.call_count, 1) def test_post_returns_4xx_response(self): # It uses", "October...\") self.form = django_comments.get_form()(self.article) def test_post_returns_2xx_response(self): data = {\"name\": \"Bob\",", "import TestCase from django.urls import reverse from rest_framework.test import APIRequestFactory,", "\"Es war einmal eine kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response =", "= post_comment(data) self.assertEqual(response.status_code, 204) self.assertEqual(self.mock_mailer.call_count, 1) def test_post_returns_4xx_response(self): # It", "import Article, Diary request_factory = APIRequestFactory() def post_comment(data, auth_user=None): request", "war einmal eine kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data,", "kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data, auth_user=self.user) self.assertEqual(response.status_code, 400)", "data) if 
auth_user: force_authenticate(request, user=auth_user) view = CommentCreate.as_view() return view(request)", "\"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data) self.assertEqual(response.status_code, 204) self.assertEqual(self.mock_mailer.call_count, 1)", "try: from unittest.mock import patch except ImportError: from mock import", "= patch('django_comments_xtd.views.send_mail') self.mock_mailer = patcher.start() self.article = Article.objects.create( title=\"October\", slug=\"october\",", "204) self.assertEqual(self.mock_mailer.call_count, 1) def test_post_returns_4xx_response(self): # It uses an authenticated", "from django_comments_xtd import django_comments from django_comments_xtd.api.views import CommentCreate from django_comments_xtd.tests.models", "return view(request) class CommentCreateTestCase(TestCase): def setUp(self): patcher = patch('django_comments_xtd.views.send_mail') self.mock_mailer", "def test_post_returns_2xx_response(self): data = {\"name\": \"Bob\", \"email\": \"<EMAIL>\", \"followup\": True,", "data.update(self.form.initial) response = post_comment(data) self.assertEqual(response.status_code, 204) self.assertEqual(self.mock_mailer.call_count, 1) def test_post_returns_4xx_response(self):", "django.urls import reverse from rest_framework.test import APIRequestFactory, force_authenticate from django_comments_xtd", "\"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data, auth_user=self.user) self.assertEqual(response.status_code, 400) self.assertTrue('name'", "auth_user=None): request = request_factory.post(reverse('comments-xtd-api-create'), data) if auth_user: force_authenticate(request, user=auth_user) view", "\"reply_to\": 0, \"level\": 1, \"order\": 1, \"comment\": \"Es war einmal", "import APIRequestFactory, force_authenticate from django_comments_xtd import django_comments from django_comments_xtd.api.views import", "def post_comment(data, auth_user=None): request = 
request_factory.post(reverse('comments-xtd-api-create'), data) if auth_user: force_authenticate(request,", "from __future__ import unicode_literals try: from unittest.mock import patch except", "import patch from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType", "user=auth_user) view = CommentCreate.as_view() return view(request) class CommentCreateTestCase(TestCase): def setUp(self):", "Article.objects.create( title=\"October\", slug=\"october\", body=\"What I did on October...\") self.form =", "force_authenticate from django_comments_xtd import django_comments from django_comments_xtd.api.views import CommentCreate from", "but the user has no mail address. self.user = User.objects.create_user(\"bob\",", "post_comment(data, auth_user=self.user) self.assertEqual(response.status_code, 400) self.assertTrue('name' in response.data) self.assertTrue('email' in response.data)", "{\"name\": \"\", \"email\": \"\", \"followup\": True, \"reply_to\": 0, \"level\": 1,", "django_comments from django_comments_xtd.api.views import CommentCreate from django_comments_xtd.tests.models import Article, Diary", "= CommentCreate.as_view() return view(request) class CommentCreateTestCase(TestCase): def setUp(self): patcher =", "einmal eine kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data, auth_user=self.user)", "\"level\": 1, \"order\": 1, \"comment\": \"Es war einmal eine kleine...\",", "except ImportError: from mock import patch from django.contrib.auth.models import User", "\"\", \"followup\": True, \"reply_to\": 0, \"level\": 1, \"order\": 1, \"comment\":", "Article, Diary request_factory = APIRequestFactory() def post_comment(data, auth_user=None): request =", "\"\"} data.update(self.form.initial) response = post_comment(data, auth_user=self.user) self.assertEqual(response.status_code, 400) self.assertTrue('name' in", "import django_comments from django_comments_xtd.api.views import 
CommentCreate from django_comments_xtd.tests.models import Article,", "= User.objects.create_user(\"bob\", \"\", \"pwd\") data = {\"name\": \"\", \"email\": \"\",", "import reverse from rest_framework.test import APIRequestFactory, force_authenticate from django_comments_xtd import", "response = post_comment(data) self.assertEqual(response.status_code, 204) self.assertEqual(self.mock_mailer.call_count, 1) def test_post_returns_4xx_response(self): #", "def test_post_returns_4xx_response(self): # It uses an authenticated user, but the", "no mail address. self.user = User.objects.create_user(\"bob\", \"\", \"pwd\") data =", "auth_user=self.user) self.assertEqual(response.status_code, 400) self.assertTrue('name' in response.data) self.assertTrue('email' in response.data) self.assertEqual(self.mock_mailer.call_count,", "patch except ImportError: from mock import patch from django.contrib.auth.models import", "rest_framework.test import APIRequestFactory, force_authenticate from django_comments_xtd import django_comments from django_comments_xtd.api.views", "auth_user: force_authenticate(request, user=auth_user) view = CommentCreate.as_view() return view(request) class CommentCreateTestCase(TestCase):", "self.user = User.objects.create_user(\"bob\", \"\", \"pwd\") data = {\"name\": \"\", \"email\":", "django_comments_xtd.api.views import CommentCreate from django_comments_xtd.tests.models import Article, Diary request_factory =", "= APIRequestFactory() def post_comment(data, auth_user=None): request = request_factory.post(reverse('comments-xtd-api-create'), data) if", "django.contrib.contenttypes.models import ContentType from django.test import TestCase from django.urls import", "request_factory.post(reverse('comments-xtd-api-create'), data) if auth_user: force_authenticate(request, user=auth_user) view = CommentCreate.as_view() return", "body=\"What I did on October...\") self.form = django_comments.get_form()(self.article) def test_post_returns_2xx_response(self):", 
"\"email\": \"<EMAIL>\", \"followup\": True, \"reply_to\": 0, \"level\": 1, \"order\": 1,", "has no mail address. self.user = User.objects.create_user(\"bob\", \"\", \"pwd\") data", "the user has no mail address. self.user = User.objects.create_user(\"bob\", \"\",", "Diary request_factory = APIRequestFactory() def post_comment(data, auth_user=None): request = request_factory.post(reverse('comments-xtd-api-create'),", "self.mock_mailer = patcher.start() self.article = Article.objects.create( title=\"October\", slug=\"october\", body=\"What I", "eine kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data, auth_user=self.user) self.assertEqual(response.status_code,", "from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.test", "import CommentCreate from django_comments_xtd.tests.models import Article, Diary request_factory = APIRequestFactory()", "did on October...\") self.form = django_comments.get_form()(self.article) def test_post_returns_2xx_response(self): data =", "self.form = django_comments.get_form()(self.article) def test_post_returns_2xx_response(self): data = {\"name\": \"Bob\", \"email\":", "self.assertEqual(response.status_code, 400) self.assertTrue('name' in response.data) self.assertTrue('email' in response.data) self.assertEqual(self.mock_mailer.call_count, 0)", "django_comments_xtd.tests.models import Article, Diary request_factory = APIRequestFactory() def post_comment(data, auth_user=None):", "django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.test import", "an authenticated user, but the user has no mail address.", "ImportError: from mock import patch from django.contrib.auth.models import User from", "\"<EMAIL>\", \"followup\": True, \"reply_to\": 0, \"level\": 1, \"order\": 1, \"comment\":", "= {\"name\": \"Bob\", \"email\": \"<EMAIL>\", \"followup\": True, \"reply_to\": 0, \"level\":", 
"from mock import patch from django.contrib.auth.models import User from django.contrib.contenttypes.models", "django.test import TestCase from django.urls import reverse from rest_framework.test import", "request_factory = APIRequestFactory() def post_comment(data, auth_user=None): request = request_factory.post(reverse('comments-xtd-api-create'), data)", "\"pwd\") data = {\"name\": \"\", \"email\": \"\", \"followup\": True, \"reply_to\":", "1) def test_post_returns_4xx_response(self): # It uses an authenticated user, but", "from django.urls import reverse from rest_framework.test import APIRequestFactory, force_authenticate from", "self.assertEqual(response.status_code, 204) self.assertEqual(self.mock_mailer.call_count, 1) def test_post_returns_4xx_response(self): # It uses an", "on October...\") self.form = django_comments.get_form()(self.article) def test_post_returns_2xx_response(self): data = {\"name\":", "\"\", \"pwd\") data = {\"name\": \"\", \"email\": \"\", \"followup\": True,", "= django_comments.get_form()(self.article) def test_post_returns_2xx_response(self): data = {\"name\": \"Bob\", \"email\": \"<EMAIL>\",", "request = request_factory.post(reverse('comments-xtd-api-create'), data) if auth_user: force_authenticate(request, user=auth_user) view =", "True, \"reply_to\": 0, \"level\": 1, \"order\": 1, \"comment\": \"Es war", "\"\"} data.update(self.form.initial) response = post_comment(data) self.assertEqual(response.status_code, 204) self.assertEqual(self.mock_mailer.call_count, 1) def", "data = {\"name\": \"\", \"email\": \"\", \"followup\": True, \"reply_to\": 0,", "APIRequestFactory, force_authenticate from django_comments_xtd import django_comments from django_comments_xtd.api.views import CommentCreate", "force_authenticate(request, user=auth_user) view = CommentCreate.as_view() return view(request) class CommentCreateTestCase(TestCase): def", "django_comments.get_form()(self.article) def test_post_returns_2xx_response(self): data = {\"name\": 
\"Bob\", \"email\": \"<EMAIL>\", \"followup\":", "I did on October...\") self.form = django_comments.get_form()(self.article) def test_post_returns_2xx_response(self): data", "einmal eine kleine...\", \"honeypot\": \"\"} data.update(self.form.initial) response = post_comment(data) self.assertEqual(response.status_code,", "= patcher.start() self.article = Article.objects.create( title=\"October\", slug=\"october\", body=\"What I did" ]
[ "keeps the leftmost valid index of a char. res=[] #", "with max # and be valid c=key if not c:", "for c in str: d[c]+=1 # create a valid dict", "chars d=collections.defaultdict(int) for c in str: d[c]+=1 # create a", "max # and be valid c=key if not c: return", "of chars d=collections.defaultdict(int) for c in str: d[c]+=1 # create", "# get c with max # and be valid c=key", "one, that with max # first, must have valid leftmost", "c or d[key]>d[c]) and d[key]>0 and v[key]<=i: # get c", ":type k: int :rtype: str \"\"\" ## greedy: count keeps", "c=None for key in d: if (not c or d[key]>d[c])", "index of a char. res=[] # count # of chars", "range(len(str)): c=None for key in d: if (not c or", "i in range(len(str)): c=None for key in d: if (not", "(not c or d[key]>d[c]) and d[key]>0 and v[key]<=i: # get", "get c with max # and be valid c=key if", "valid keeps the leftmost valid index of a char. res=[]", "create a valid dict v=collections.defaultdict(int) # add char one by", "char. res=[] # count # of chars d=collections.defaultdict(int) for c", "count keeps the # of chars, valid keeps the leftmost", "v=collections.defaultdict(int) # add char one by one, that with max", "count # of chars d=collections.defaultdict(int) for c in str: d[c]+=1", "str \"\"\" ## greedy: count keeps the # of chars,", "# of chars d=collections.defaultdict(int) for c in str: d[c]+=1 #", "d: if (not c or d[key]>d[c]) and d[key]>0 and v[key]<=i:", "a valid dict v=collections.defaultdict(int) # add char one by one,", "in range(len(str)): c=None for key in d: if (not c", "# count # of chars d=collections.defaultdict(int) for c in str:", "d[key]>d[c]) and d[key]>0 and v[key]<=i: # get c with max", "## greedy: count keeps the # of chars, valid keeps", "k: int :rtype: str \"\"\" ## greedy: count keeps the", "# first, must have valid leftmost index for i in", "d[key]>0 and v[key]<=i: # get c with max # and", "if (not c or d[key]>d[c]) and d[key]>0 and v[key]<=i: #", "of chars, valid keeps the leftmost 
valid index of a", "Solution(object): def rearrangeString(self, str, k): \"\"\" :type str: str :type", "# create a valid dict v=collections.defaultdict(int) # add char one", "have valid leftmost index for i in range(len(str)): c=None for", "first, must have valid leftmost index for i in range(len(str)):", "valid c=key if not c: return '' res.append(c) d[c]-=1 v[c]=i+k", "in str: d[c]+=1 # create a valid dict v=collections.defaultdict(int) #", "max # first, must have valid leftmost index for i", "of a char. res=[] # count # of chars d=collections.defaultdict(int)", "int :rtype: str \"\"\" ## greedy: count keeps the #", "by one, that with max # first, must have valid", "c=key if not c: return '' res.append(c) d[c]-=1 v[c]=i+k return", "for i in range(len(str)): c=None for key in d: if", "str: str :type k: int :rtype: str \"\"\" ## greedy:", "char one by one, that with max # first, must", "# and be valid c=key if not c: return ''", "for key in d: if (not c or d[key]>d[c]) and", "and d[key]>0 and v[key]<=i: # get c with max #", "key in d: if (not c or d[key]>d[c]) and d[key]>0", "that with max # first, must have valid leftmost index", "chars, valid keeps the leftmost valid index of a char.", "add char one by one, that with max # first,", "\"\"\" :type str: str :type k: int :rtype: str \"\"\"", "keeps the # of chars, valid keeps the leftmost valid", "the leftmost valid index of a char. res=[] # count", "be valid c=key if not c: return '' res.append(c) d[c]-=1", "in d: if (not c or d[key]>d[c]) and d[key]>0 and", "and be valid c=key if not c: return '' res.append(c)", ":rtype: str \"\"\" ## greedy: count keeps the # of", "valid index of a char. 
res=[] # count # of", "with max # first, must have valid leftmost index for", "def rearrangeString(self, str, k): \"\"\" :type str: str :type k:", "if not c: return '' res.append(c) d[c]-=1 v[c]=i+k return ''.join(res)", ":type str: str :type k: int :rtype: str \"\"\" ##", "leftmost index for i in range(len(str)): c=None for key in", "class Solution(object): def rearrangeString(self, str, k): \"\"\" :type str: str", "\"\"\" ## greedy: count keeps the # of chars, valid", "a char. res=[] # count # of chars d=collections.defaultdict(int) for", "or d[key]>d[c]) and d[key]>0 and v[key]<=i: # get c with", "greedy: count keeps the # of chars, valid keeps the", "res=[] # count # of chars d=collections.defaultdict(int) for c in", "valid leftmost index for i in range(len(str)): c=None for key", "d[c]+=1 # create a valid dict v=collections.defaultdict(int) # add char", "and v[key]<=i: # get c with max # and be", "c in str: d[c]+=1 # create a valid dict v=collections.defaultdict(int)", "str :type k: int :rtype: str \"\"\" ## greedy: count", "str, k): \"\"\" :type str: str :type k: int :rtype:", "d=collections.defaultdict(int) for c in str: d[c]+=1 # create a valid", "one by one, that with max # first, must have", "v[key]<=i: # get c with max # and be valid", "str: d[c]+=1 # create a valid dict v=collections.defaultdict(int) # add", "k): \"\"\" :type str: str :type k: int :rtype: str", "# of chars, valid keeps the leftmost valid index of", "rearrangeString(self, str, k): \"\"\" :type str: str :type k: int", "dict v=collections.defaultdict(int) # add char one by one, that with", "index for i in range(len(str)): c=None for key in d:", "# add char one by one, that with max #", "the # of chars, valid keeps the leftmost valid index", "c with max # and be valid c=key if not", "must have valid leftmost index for i in range(len(str)): c=None", "leftmost valid index of a char. res=[] # count #", "valid dict v=collections.defaultdict(int) # add char one by one, that" ]
[ "self.com_kin[it] kinematic_state.lmom = self.lmom_kin[it] kinematic_state.amom = self.amom_kin[it] kinematic_state.robot_posture.base_position = q[:3]", "self.dt = planner_setting.get(PlannerDoubleParam_TimeStep) self.num_time_steps = planner_setting.get(PlannerIntParam_NumTimesteps) self.max_iterations = max_iterations self.eps", "min(max(mom_kin_optimizer.com_dyn[:, 2]), self.max_bound) z_min = max(min(mom_kin_optimizer.com_dyn[:, 2]), self.min_bound) return z_max,", "= [eff_traj_poly[name][i].deval(it * dt) for i in range(3)] # HACK:", "generate_eff_traj(contacts, z_offset): effs = contacts.keys() eff_traj_poly = {} for eff", "planner_setting, max_iterations=50, eps=0.001, endeff_traj_generator=None, RobotWrapper=QuadrupedWrapper): self.planner_setting = planner_setting if endeff_traj_generator", "for j in range(num_contacts): contact_ = contact_states(i)[j] start_time = contact_.start_time", "endeffector on the ground. t = [cnt[i].start_time(), cnt[i].end_time()] for idx", "self.poly_traj = [] for i in range(len(self.q_init)): self.poly_traj = np.append(self.poly_traj,", "endeff_vel_ref = np.matrix(np.zeros((init_state.effNum(), 3))) endeff_contact = np.ones(init_state.effNum()) quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0,", "len(contact_states(i)) contacts[eff] = [] for j in range(num_contacts): contact_ =", "Compute the endeffector position and velocity trajectories. 
endeff_pos_ref = np.zeros((num_time_steps,", "0, 0.]).T)) quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref =", "= len(contact_states(i)) contacts[eff] = [] for j in range(num_contacts): contact_", "self.inv_kin.forward_robot(q, dq) self.fill_kinematic_result(it, q, dq) dq = self.inv_kin.compute( q, dq,", "num_eff = len(mom_kin_optimizer.eff_names) num_time_steps = mom_kin_optimizer.num_time_steps contacts = get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states, mom_kin_optimizer.eff_names)", "t = [cnt[i].end_time(), cnt[i+1].start_time()] for idx in range(3): via =", "self.inv_kin = PointContactInverseKinematics(self.robot.model, self.eff_names) self.motion_eff = { 'trajectory': np.zeros((self.num_time_steps, 3", "file not same as required for robot\\n' + 'Got %d", "contacts = {} for i, eff in enumerate(effs): num_contacts =", "range(init_state.effNum())]) endeff_vel_ref = np.matrix(np.zeros((init_state.effNum(), 3))) endeff_contact = np.ones(init_state.effNum()) quad_goal =", "for i, eff in enumerate(effs): num_contacts = len(contact_states(i)) contacts[eff] =", "import \\ PlannerVectorParam_KinematicDefaultJointPositions, \\ PlannerIntParam_NumTimesteps, \\ PlannerDoubleParam_TimeStep class Contact(object): def", "* self.inv_kin.ne)), 'trajectory_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity_wrt_base': np.zeros((self.num_time_steps, 3", "position(self): return self.pos def start_time(self): return self.init_time def end_time(self): return", "return contacts def generate_eff_traj(contacts, z_offset): effs = contacts.keys() eff_traj_poly =", "self.num_time_steps = None self.q_init = None self.poly_traj = None def", "\\ PlannerDoubleParam_TimeStep class Contact(object): def __init__(self, position, start_time, end_time): self.pos", "eff in effs: cnt = contacts[eff] num_contacts = len(cnt) poly_traj", "in range(3): via = None if idx == 2: via", "= [cnt[i].start_time(), cnt[i].end_time()] 
for idx in range(3): poly_traj[idx].append(t, constant_poly(cnt[i].position()[idx])) #", "np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'trajectory_wrt_base':", "self.inv_kin.ne)), 'velocity': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'trajectory_wrt_base': np.zeros((self.num_time_steps, 3 *", "t = [q_via[i-1,0]/self.dt, q_via[i,0]/self.dt] poly = poly_points(t, q_via[i-1,j+1], q_via[i,j+1]) self.poly_traj[j].append(t,", "endeff_contact = np.zeros((num_time_steps, num_eff)) for it in range(num_time_steps): for eff,", "res) if np.linalg.norm(res) < 1e-3: print('Found initial configuration after {}", "np.pi/2, -np.pi, -np.pi/2, np.pi, -np.pi/2, np.pi]).T # q_max = np.matrix([1.35,", "framesVel(frames): return np.vstack([ self.inv_kin.get_world_oriented_frame_jacobian(q, idx).dot(dq)[:3] for idx in frames ]).reshape(-1)", "poly_points(t, self.q_init[j], q_via[i,j+1]) self.poly_traj[j].append(t, poly) elif(i==len(q_via[:,0])): t = [q_via[i-1,0]/self.dt, self.num_time_steps]", "range(len(self.q_init)): self.poly_traj = np.append(self.poly_traj, [PolynominalList()]) for j in range(len(self.q_init)): for", "float) if self.q_via is None: for i in range (self.num_time_steps):", "def generate_eff_traj(contacts, z_offset): effs = contacts.keys() eff_traj_poly = {} for", "hg.angular.T self.q_kin[it] = q.T self.dq_kin[it] = dq.T # The endeffector", "0.32 dq = np.matrix(np.zeros(self.robot.robot.nv)).T com_ref = init_state.com lmom_ref = np.zeros(3)", "= dq[3:6] kinematic_state.robot_velocity.joint_velocities = dq[6:] def optimize_initial_position(self, init_state): # Optimize", "# Fill the kinematics results for it. 
self.inv_kin.forward_robot(q, dq) self.fill_kinematic_result(it,", "se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref = (self.reg_orientation * se3.log((quad_goal *", "self.z_offset = 0.1 def get_z_bound(self, mom_kin_optimizer): z_max = min(max(mom_kin_optimizer.com_dyn[:, 2]),", "num_time_steps = mom_kin_optimizer.num_time_steps contacts = get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states, mom_kin_optimizer.eff_names) # Generate minimum", "should go to config file # q_jump = [1., 0.1,", "for idx in frames ]).reshape(-1) data = self.inv_kin.robot.data hg =", "@copyright Copyright (c) 2019, New York University and Max Planck", "= position self.init_time = start_time self.final_time = end_time def position(self):", "self.pos def start_time(self): return self.init_time def end_time(self): return self.final_time def", "trajectories eff_traj_poly = generate_eff_traj(contacts, self.z_offset) # Compute the endeffector position", "idx == 2: via = z_offset + cnt[i].position()[idx] poly =", "self.eff_names = ['{}_{}'.format(eff, self.robot.joints_list[-1]) for eff in self.robot.effs] self.inv_kin =", "'Got %d joints but robot expects %d joints.' % (", "optimize_initial_position(self, init_state): # Optimize the initial configuration q = se3.neutral(self.robot.model)", "= [q_via[i-1,0]/self.dt, self.num_time_steps] poly = poly_points(t, q_via[i-1,j+1], self.q_init[j]) self.poly_traj[j].append(t, poly)", "the kinematic sequence. 
kinematic_state = self.kinematics_sequence.kinematics_states[it] kinematic_state.com = self.com_kin[it] kinematic_state.lmom", "self.inv_kin.compute(q, dq, com_ref, lmom_ref, amom_ref, endeff_pos_ref, endeff_vel_ref, endeff_contact, None) q", "= dq.copy() def optimize(self, init_state, contact_sequence, dynamic_sequence, plotting=False): self.init_state =", "# print \"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints) # print \"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints) # TODO: this is", "trajectories. endeff_pos_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_vel_ref = np.zeros((num_time_steps, num_eff,", "framesVel(self.hip_ids) # Storing on the kinematic sequence. kinematic_state = self.kinematics_sequence.kinematics_states[it]", "= contact_.end_time position = contact_.position contacts[eff].append(Contact(position, start_time, end_time)) return contacts", "PolynominalList(), PolynominalList() ] for i in range(num_contacts): # Create a", "range(3)] # HACK: If the velocity is zero, assume the", "jerk trajectories eff_traj_poly = generate_eff_traj(contacts, self.z_offset) # Compute the endeffector", "len(plan_joint_init_pos), self.robot.num_ctrl_joints)) q[7:] = np.matrix(plan_joint_init_pos).T q[2] = self.robot.floor_height + 0.32", "is zero, assume the endeffector is in # contact with", "frames ]).reshape(-1) data = self.inv_kin.robot.data hg = self.inv_kin.robot.centroidalMomentum(q, dq) #", "None) q = se3.integrate(self.robot.model, q, res) if np.linalg.norm(res) < 1e-3:", "setup.') print(\"initial configuration: \\n\", q) self.q_init = q.copy() self.dq_init =", "3 * self.inv_kin.ne)), 'velocity_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)) } def", "* se3.log((quad_goal * quad_q.inverse()).matrix()).T + self.amom_dyn[it]).reshape(-1) joint_regularization_ref = self.reg_joint_position *", "class Contact(object): def __init__(self, position, start_time, end_time): self.pos = 
position", "after {} iterations'.format(iters + 1)) break if iters == self.max_iterations", "if np.linalg.norm(res) < 1e-3: print('Found initial configuration after {} iterations'.format(iters", "import PointContactInverseKinematics from pinocchio import RobotWrapper import pinocchio as se3", "endeff_pos_ref[it][eff] = [eff_traj_poly[name][i].eval(it * dt) for i in range(3)] endeff_vel_ref[it][eff]", "poly = poly_points(t, q_via[i-1,j+1], q_via[i,j+1]) self.poly_traj[j].append(t, poly) def eval_traj(self,t): q", "return np.matrix(q) class MomentumKinematicsOptimizer(object): def __init__(self): self.q_init = None self.dq_init", "endeff_pos_ref, endeff_vel_ref, endeff_contact, None) q = se3.integrate(self.robot.model, q, res) if", "[cnt[i].end_time(), cnt[i+1].start_time()] for idx in range(3): via = None if", "def fill_kinematic_result(self, it, q, dq): def framesPos(frames): return np.vstack([data.oMf[idx].translation for", "endeff_vel_ref [0]: endeff_pos_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y, z}] [1]:", "effs = contacts.keys() eff_traj_poly = {} for eff in effs:", "= contact_.position contacts[eff].append(Contact(position, start_time, end_time)) return contacts def generate_eff_traj(contacts, z_offset):", "[0]: endeff_pos_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y, z}] [1]: endeff_vel_ref:", "dt) for i in range(3)] # HACK: If the velocity", "= RobotWrapper() self.reset() # Holds dynamics and kinematics results self.com_dyn", "in self.robot.effs] self.hip_ids = [self.robot.model.getFrameId(name) for name in self.hip_names] self.eff_names", "poly_traj[idx].append(t, poly) eff_traj_poly[eff] = poly_traj # returns end eff trajectories", "* self.inv_kin.ne)), 'velocity_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)) } def fill_data_from_dynamics(self):", "for robot\\n' + 'Got %d joints but robot expects %d", "if self.q_via is None: for i in range (self.num_time_steps): self.joint_des[:,i]", "PolynominalList(), 
PolynominalList(), PolynominalList() ] for i in range(num_contacts): # Create", "self.q_kin[it] = q.T self.dq_kin[it] = dq.T # The endeffector informations", "dq, com_ref, lmom_ref, amom_ref, endeff_pos_ref, endeff_vel_ref, endeff_contact, None) q =", "q_via[0,0]/self.dt] poly = poly_points(t, self.q_init[j], q_via[i,j+1]) self.poly_traj[j].append(t, poly) elif(i==len(q_via[:,0])): t", "def __init__(self): self.dt =.01 self.num_time_steps = None self.q_init = None", "fill_kinematic_result(self, it, q, dq): def framesPos(frames): return np.vstack([data.oMf[idx].translation for idx", "small P controller for the base orientation to always start", "= generate_eff_traj(contacts, self.z_offset) # Compute the endeffector position and velocity", "as se3 from pinocchio.utils import zero from pymomentum import *", "= len(mom_kin_optimizer.eff_names) num_time_steps = mom_kin_optimizer.num_time_steps contacts = get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states, mom_kin_optimizer.eff_names) #", "self.dt =.01 self.num_time_steps = None self.q_init = None self.poly_traj =", "0.1, -0.2 ,0.1, -0.2 ,-0.1, 0.2 ,-0.1, 0.2] # q_via", "for i in range(init_state.effNum())]) endeff_vel_ref = np.matrix(np.zeros((init_state.effNum(), 3))) endeff_contact =", "np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)) }", "\"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints) # print \"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints) # TODO: this is for jump,", "-.7*np.pi, .7*np.pi/2, -.7*np.pi, -.7*np.pi/2, .7*np.pi, -.7*np.pi/2, .7*np.pi]).T # q_via0 =", "cnt[i].end_time()] for idx in range(3): poly_traj[idx].append(t, constant_poly(cnt[i].position()[idx])) # If there", "# the two contact points. 
if i < num_contacts -", "q = np.zeros((1,len(self.q_init)),float) for j in range(len(self.q_init)): q[0,j] = self.poly_traj[j].eval(t)", "orientation to always start with flat # oriented base. quad_q", "position and velocity trajectories. endeff_pos_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_vel_ref", "= 1e-1 * se3.log((quad_goal * quad_q.inverse()).matrix()) res = self.inv_kin.compute(q, dq,", "# Storing on the internal array. self.com_kin[it] = self.inv_kin.robot.com(q).T self.lmom_kin[it]", "the initial configuration only once. if self.q_init is None: self.optimize_initial_position(init_state)", "PointContactInverseKinematics(self.robot.model, self.eff_names) self.motion_eff = { 'trajectory': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)),", "lmom_ref, amom_ref, endeff_pos_ref, endeff_vel_ref, endeff_contact, None) q = se3.integrate(self.robot.model, q,", "configuration after {} iterations'.format(iters + 1)) break if iters ==", "= [cnt[i].end_time(), cnt[i+1].start_time()] for idx in range(3): via = None", "self.q_init[7:] self.joint_des = np.zeros((len(self.q_init[7:]),self.num_time_steps), float) if self.q_via is None: for", "q_via[i,j+1]) self.poly_traj[j].append(t, poly) elif(i==len(q_via[:,0])): t = [q_via[i-1,0]/self.dt, self.num_time_steps] poly =", "and kinematics results self.com_dyn = np.zeros((self.num_time_steps, 3)) self.lmom_dyn = np.zeros((self.num_time_steps,", "eff_traj_poly = {} for eff in effs: cnt = contacts[eff]", "QpSolver from momentumopt.kinoptpy.inverse_kinematics import PointContactInverseKinematics from pinocchio import RobotWrapper import", "len(mom_kin_optimizer.eff_names) num_time_steps = mom_kin_optimizer.num_time_steps contacts = get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states, mom_kin_optimizer.eff_names) # Generate", "= PointContactInverseKinematics(self.robot.model, self.eff_names) self.motion_eff = { 'trajectory': np.zeros((self.num_time_steps, 3 *", "= self.q_init.copy(), self.dq_init.copy() 
for it in range(self.num_time_steps): quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0,", "= [eff_traj_poly[name][i].eval(it * dt) for i in range(3)] endeff_vel_ref[it][eff] =", "= q[7:] kinematic_state.robot_velocity.base_linear_velocity = dq[:3] kinematic_state.robot_velocity.base_angular_velocity = dq[3:6] kinematic_state.robot_velocity.joint_velocities =", "self.inv_kin.robot.data hg = self.inv_kin.robot.centroidalMomentum(q, dq) # Storing on the internal", "import * from pymomentum import \\ PlannerVectorParam_KinematicDefaultJointPositions, \\ PlannerIntParam_NumTimesteps, \\", "from momentumopt.kinoptpy.qp import QpSolver from momentumopt.kinoptpy.inverse_kinematics import PointContactInverseKinematics from pinocchio", "eff in self.robot.effs] self.hip_ids = [self.robot.model.getFrameId(name) for name in self.hip_names]", "\\n\", q) self.q_init = q.copy() self.dq_init = dq.copy() def optimize(self,", "contact_.position contacts[eff].append(Contact(position, start_time, end_time)) return contacts def generate_eff_traj(contacts, z_offset): effs", "desired joint trajectory # print \"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints) # print \"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints) #", "poly_points(t, q_via[i-1,j+1], self.q_init[j]) self.poly_traj[j].append(t, poly) else: t = [q_via[i-1,0]/self.dt, q_via[i,0]/self.dt]", "z}] [1]: endeff_vel_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y, z}] '''", "= q[3:7] kinematic_state.robot_posture.joint_positions = q[7:] kinematic_state.robot_velocity.base_linear_velocity = dq[:3] kinematic_state.robot_velocity.base_angular_velocity =", ": ]) # joint_regularization_ref = self.reg_joint_position * (self.q_init[7 : ]", "# Integrate to the next state. q = se3.integrate(self.robot.model, q,", "== 0.): endeff_contact[it][eff] = 1. else: endeff_contact[it][eff] = 0. 
return", "q[2] = self.robot.floor_height + 0.32 dq = np.matrix(np.zeros(self.robot.robot.nv)).T com_ref =", "= poly_traj # returns end eff trajectories return eff_traj_poly class", "range(self.num_time_steps): self.com_dyn[it] = self.dynamic_sequence.dynamics_states[it].com self.lmom_dyn[it] = self.dynamic_sequence.dynamics_states[it].lmom self.amom_dyn[it] = self.dynamic_sequence.dynamics_states[it].amom", "from pinocchio.utils import zero from pymomentum import * from momentumopt.quadruped.quadruped_wrapper", "[q_via[i-1,0]/self.dt, q_via[i,0]/self.dt] poly = poly_points(t, q_via[i-1,j+1], q_via[i,j+1]) self.poly_traj[j].append(t, poly) def", "q[:3] kinematic_state.robot_posture.base_orientation = q[3:7] kinematic_state.robot_posture.joint_positions = q[7:] kinematic_state.robot_velocity.base_linear_velocity = dq[:3]", "zero from pymomentum import * from momentumopt.quadruped.quadruped_wrapper import QuadrupedWrapper from", "end_time(self): return self.final_time def get_contact_plan(contact_states, effs): contacts = {} for", "range(len(self.q_init)): for i in range (len(q_via[:,0])+1): if i==0: t =", "= se3.integrate(self.robot.model, q, res) if np.linalg.norm(res) < 1e-3: print('Found initial", "num_eff, 3)) endeff_contact = np.zeros((num_time_steps, num_eff)) for it in range(num_time_steps):", "np.array, shape=[num_time_steps, num_eff, 3={x, y, z}] [1]: endeff_vel_ref: np.array, shape=[num_time_steps,", "= None self.dq_init = None self.reg_orientation = 1e-2 self.reg_joint_position =", "+ 1)) break if iters == self.max_iterations - 1: print('Failed", "in range (self.num_time_steps): self.joint_des[:,i] = self.q_init[7 : ].T else: joint_traj_gen.joint_traj(self.q_via)", "constant_poly(cnt[i].position()[idx])) # If there is a contact following, add the", "configuration only once. 
if self.q_init is None: self.optimize_initial_position(init_state) # Get", "Generate minimum jerk trajectories eff_traj_poly = generate_eff_traj(contacts, self.z_offset) # Compute", "self.reg_joint_position * (np.matrix(self.joint_des[:,it]).T - q[7 : ]) # joint_regularization_ref =", "1e-1 * se3.log((quad_goal * quad_q.inverse()).matrix()) res = self.inv_kin.compute(q, dq, com_ref,", "BSD-3-Clause @copyright Copyright (c) 2019, New York University and Max", "following, add the transition between # the two contact points.", "t = [0, q_via[0,0]/self.dt] poly = poly_points(t, self.q_init[j], q_via[i,j+1]) self.poly_traj[j].append(t,", "self.z_offset) # Compute the endeffector position and velocity trajectories. endeff_pos_ref", "= poly_points(t, q_via[i-1,j+1], self.q_init[j]) self.poly_traj[j].append(t, poly) else: t = [q_via[i-1,0]/self.dt,", "self.poly_traj = None def joint_traj(self, q_via): self.poly_traj = [] for", "endeff_contact class JointTrajectoryGenerator(object): def __init__(self): self.dt =.01 self.num_time_steps = None", "in range(self.num_time_steps): self.com_dyn[it] = self.dynamic_sequence.dynamics_states[it].com self.lmom_dyn[it] = self.dynamic_sequence.dynamics_states[it].lmom self.amom_dyn[it] =", "- framesPos(self.hip_ids) self.motion_eff['velocity_wrt_base'][it] = \\ self.motion_eff['velocity'][it] - framesVel(self.hip_ids) # Storing", "kinematic_state.robot_velocity.joint_velocities = dq[6:] def optimize_initial_position(self, init_state): # Optimize the initial", "\\ self.motion_eff['velocity'][it] - framesVel(self.hip_ids) # Storing on the kinematic sequence.", "-np.pi, np.pi/2, -np.pi, -np.pi/2, np.pi, -np.pi/2, np.pi]).T # q_max =", "def __call__(self, mom_kin_optimizer): ''' Computes the endeffector positions and velocities.", "self.inv_kin.compute( q, dq, self.com_dyn[it], self.lmom_dyn[it], amom_ref, self.endeff_pos_ref[it], self.endeff_vel_ref[it], self.endeff_contact[it], joint_regularization_ref)", "# Generate minimum jerk 
trajectories eff_traj_poly = generate_eff_traj(contacts, self.z_offset) #", "self.dynamic_sequence.dynamics_states[it].com self.lmom_dyn[it] = self.dynamic_sequence.dynamics_states[it].lmom self.amom_dyn[it] = self.dynamic_sequence.dynamics_states[it].amom def fill_endeffector_trajectory(self): self.endeff_pos_ref,", "1. else: endeff_contact[it][eff] = 0. return endeff_pos_ref, endeff_vel_ref, endeff_contact class", "def fill_data_from_dynamics(self): # The centroidal information for it in range(self.num_time_steps):", "3)) self.amom_dyn = np.zeros((self.num_time_steps, 3)) self.com_kin = np.zeros((self.num_time_steps, 3)) self.lmom_kin", "= ['{}_HFE'.format(eff) for eff in self.robot.effs] self.hip_ids = [self.robot.model.getFrameId(name) for", "== 2: via = z_offset + cnt[i].position()[idx] poly = poly_points(t,", "fill_endeffector_trajectory(self): self.endeff_pos_ref, self.endeff_vel_ref, self.endeff_contact = \\ self.endeff_traj_generator(self) def fill_kinematic_result(self, it,", "contact_ = contact_states(i)[j] start_time = contact_.start_time end_time = contact_.end_time position", "in enumerate(effs): num_contacts = len(contact_states(i)) contacts[eff] = [] for j", "se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) q[3:7] = quad_goal.coeffs() for iters in range(self.max_iterations):", "= self.poly_traj[j].eval(t) return np.matrix(q) class MomentumKinematicsOptimizer(object): def __init__(self): self.q_init =", "joint trajectory # print \"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints) # print \"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints) # TODO:", "= len(cnt) poly_traj = [ PolynominalList(), PolynominalList(), PolynominalList() ] for", "# If there is a contact following, add the transition", "[0, q_via[0,0]/self.dt] poly = poly_points(t, self.q_init[j], q_via[i,j+1]) self.poly_traj[j].append(t, poly) elif(i==len(q_via[:,0])):", "quad_goal = 
se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) q[3:7] = quad_goal.coeffs() for iters", "in range(3)] # HACK: If the velocity is zero, assume", "self.robot.model.nq)) self.dq_kin = np.zeros((self.num_time_steps, self.robot.model.nv)) self.hip_names = ['{}_HFE'.format(eff) for eff", "q[3:7] kinematic_state.robot_posture.joint_positions = q[7:] kinematic_state.robot_velocity.base_linear_velocity = dq[:3] kinematic_state.robot_velocity.base_angular_velocity = dq[3:6]", "= init_state self.contact_sequence = contact_sequence self.dynamic_sequence = dynamic_sequence self.q_via =", "self.contact_sequence = contact_sequence self.dynamic_sequence = dynamic_sequence self.q_via = None #", "name in enumerate(mom_kin_optimizer.eff_names): endeff_pos_ref[it][eff] = [eff_traj_poly[name][i].eval(it * dt) for i", "start_time, end_time): self.pos = position self.init_time = start_time self.final_time =", "= contact_states(i)[j] start_time = contact_.start_time end_time = contact_.end_time position =", "\\ PlannerIntParam_NumTimesteps, \\ PlannerDoubleParam_TimeStep class Contact(object): def __init__(self, position, start_time,", "and velocities. Returns endeff_pos_ref, endeff_vel_ref [0]: endeff_pos_ref: np.array, shape=[num_time_steps, num_eff,", "endeff_traj_generator=None, RobotWrapper=QuadrupedWrapper): self.planner_setting = planner_setting if endeff_traj_generator is None: endeff_traj_generator", "a constant polynominal for endeffector on the ground. 
t =", "se3.neutral(self.robot.model) plan_joint_init_pos = self.planner_setting.get( PlannerVectorParam_KinematicDefaultJointPositions) if len(plan_joint_init_pos) != self.robot.num_ctrl_joints: raise", "idx in frames ]).reshape(-1) data = self.inv_kin.robot.data hg = self.inv_kin.robot.centroidalMomentum(q,", "self.fill_kinematic_result(it, q, dq) dq = self.inv_kin.compute( q, dq, self.com_dyn[it], self.lmom_dyn[it],", "def __init__(self): self.q_init = None self.dq_init = None self.reg_orientation =", "cnt[i+1].position()[idx], via) poly_traj[idx].append(t, poly) eff_traj_poly[eff] = poly_traj # returns end", "* self.inv_kin.ne)), 'velocity': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'trajectory_wrt_base': np.zeros((self.num_time_steps, 3", "P controller for the base orientation to always start with", "= np.zeros((self.num_time_steps, 3)) self.com_kin = np.zeros((self.num_time_steps, 3)) self.lmom_kin = np.zeros((self.num_time_steps,", "= se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5]))", "= EndeffectorTrajectoryGenerator() self.endeff_traj_generator = endeff_traj_generator self.dt = planner_setting.get(PlannerDoubleParam_TimeStep) self.num_time_steps =", "velocity is zero, assume the endeffector is in # contact", "it in range(num_time_steps): for eff, name in enumerate(mom_kin_optimizer.eff_names): endeff_pos_ref[it][eff] =", "j in range(len(self.q_init)): for i in range (len(q_via[:,0])+1): if i==0:", "in frames]).reshape(-1) def framesVel(frames): return np.vstack([ self.inv_kin.get_world_oriented_frame_jacobian(q, idx).dot(dq)[:3] for idx", "self.inv_kin.J[6:(self.inv_kin.ne + 2) * 3].dot(dq).T self.motion_eff['trajectory_wrt_base'][it] = \\ self.motion_eff['trajectory'][it] -", "q_via[i-1,j+1], q_via[i,j+1]) self.poly_traj[j].append(t, poly) def eval_traj(self,t): q = np.zeros((1,len(self.q_init)),float) for", "for idx in range(3): via = None if idx ==", "= 
[ PolynominalList(), PolynominalList(), PolynominalList() ] for i in range(num_contacts):", "for eff, name in enumerate(mom_kin_optimizer.eff_names): endeff_pos_ref[it][eff] = [eff_traj_poly[name][i].eval(it * dt)", "Holds dynamics and kinematics results self.com_dyn = np.zeros((self.num_time_steps, 3)) self.lmom_dyn", "data = self.inv_kin.robot.data hg = self.inv_kin.robot.centroidalMomentum(q, dq) # Storing on", "+ 2) * 3].dot(dq).T self.motion_eff['trajectory_wrt_base'][it] = \\ self.motion_eff['trajectory'][it] - framesPos(self.hip_ids)", "np.array([init_state.effPosition(i) for i in range(init_state.effNum())]) endeff_vel_ref = np.matrix(np.zeros((init_state.effNum(), 3))) endeff_contact", "def optimize(self, init_state, contact_sequence, dynamic_sequence, plotting=False): self.init_state = init_state self.contact_sequence", "= JointTrajectoryGenerator() joint_traj_gen.num_time_steps = self.num_time_steps joint_traj_gen.q_init = self.q_init[7:] self.joint_des =", "i < num_contacts - 1: t = [cnt[i].end_time(), cnt[i+1].start_time()] for", "the internal array. self.com_kin[it] = self.inv_kin.robot.com(q).T self.lmom_kin[it] = hg.linear.T self.amom_kin[it]", "ground. t = [cnt[i].start_time(), cnt[i].end_time()] for idx in range(3): poly_traj[idx].append(t,", "def get_contact_plan(contact_states, effs): contacts = {} for i, eff in", "self.joint_des = None def reset(self): self.kinematics_sequence = KinematicsSequence() self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps), self.planner_setting.get(PlannerIntParam_NumDofs))", "in range(len(self.q_init)): for i in range (len(q_via[:,0])+1): if i==0: t", "base orientation to always start with flat # oriented base.", "= self.inv_kin.compute(q, dq, com_ref, lmom_ref, amom_ref, endeff_pos_ref, endeff_vel_ref, endeff_contact, None)", "initial configuration after {} iterations'.format(iters + 1)) break if iters", "None self.reg_orientation = 1e-2 self.reg_joint_position = 2. 
self.joint_des = None", "= np.zeros((self.num_time_steps, 3)) self.amom_kin = np.zeros((self.num_time_steps, 3)) self.q_kin = np.zeros((self.num_time_steps,", "= self.inv_kin.robot.com(q).T self.lmom_kin[it] = hg.linear.T self.amom_kin[it] = hg.angular.T self.q_kin[it] =", "= np.zeros((num_time_steps, num_eff, 3)) endeff_contact = np.zeros((num_time_steps, num_eff)) for it", "None self.poly_traj = None def joint_traj(self, q_via): self.poly_traj = []", "3].dot(dq).T self.motion_eff['trajectory_wrt_base'][it] = \\ self.motion_eff['trajectory'][it] - framesPos(self.hip_ids) self.motion_eff['velocity_wrt_base'][it] = \\", "in range(self.max_iterations): # Adding small P controller for the base", "endeff_contact[it][eff] = 0. return endeff_pos_ref, endeff_vel_ref, endeff_contact class JointTrajectoryGenerator(object): def", "1)) break if iters == self.max_iterations - 1: print('Failed to", "QuadrupedWrapper from momentumopt.kinoptpy.min_jerk_traj import * from pymomentum import \\ PlannerVectorParam_KinematicDefaultJointPositions,", "i, eff in enumerate(effs): num_contacts = len(contact_states(i)) contacts[eff] = []", "position, start_time, end_time): self.pos = position self.init_time = start_time self.final_time", "for i in range (len(q_via[:,0])+1): if i==0: t = [0,", "end_time def position(self): return self.pos def start_time(self): return self.init_time def", "self.optimize_initial_position(init_state) # Get the desired joint trajectory # print \"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints)", "init_state.com lmom_ref = np.zeros(3) amom_ref = np.zeros(3) endeff_pos_ref = np.array([init_state.effPosition(i)", "Gesellschaft. @date 2019-10-08 ''' import os import numpy as np", "in frames ]).reshape(-1) data = self.inv_kin.robot.data hg = self.inv_kin.robot.centroidalMomentum(q, dq)", "# Storing on the kinematic sequence. 
kinematic_state = self.kinematics_sequence.kinematics_states[it] kinematic_state.com", "q[3:7] = quad_goal.coeffs() for iters in range(self.max_iterations): # Adding small", "start_time, end_time)) return contacts def generate_eff_traj(contacts, z_offset): effs = contacts.keys()", "* quad_q.inverse()).matrix()) res = self.inv_kin.compute(q, dq, com_ref, lmom_ref, amom_ref, endeff_pos_ref,", "the ground. if np.all(endeff_vel_ref[it][eff] == 0.): endeff_contact[it][eff] = 1. else:", "None def reset(self): self.kinematics_sequence = KinematicsSequence() self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps), self.planner_setting.get(PlannerIntParam_NumDofs)) def initialize(self,", "1: print('Failed to converge for initial setup.') print(\"initial configuration: \\n\",", "class JointTrajectoryGenerator(object): def __init__(self): self.dt =.01 self.num_time_steps = None self.q_init", "HACK: If the velocity is zero, assume the endeffector is", "from pymomentum import * from momentumopt.quadruped.quadruped_wrapper import QuadrupedWrapper from momentumopt.kinoptpy.min_jerk_traj", "z_max = min(max(mom_kin_optimizer.com_dyn[:, 2]), self.max_bound) z_min = max(min(mom_kin_optimizer.com_dyn[:, 2]), self.min_bound)", "2. self.joint_des = None def reset(self): self.kinematics_sequence = KinematicsSequence() self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps),", "Compute inverse kinematics over the full trajectory. 
self.inv_kin.is_init_time = 0", "License BSD-3-Clause @copyright Copyright (c) 2019, New York University and", "= self.lmom_kin[it] kinematic_state.amom = self.amom_kin[it] kinematic_state.robot_posture.base_position = q[:3] kinematic_state.robot_posture.base_orientation =", "for eff in effs: cnt = contacts[eff] num_contacts = len(cnt)", "self.planner_setting.get(PlannerIntParam_NumDofs)) def initialize(self, planner_setting, max_iterations=50, eps=0.001, endeff_traj_generator=None, RobotWrapper=QuadrupedWrapper): self.planner_setting =", "minimum jerk trajectories eff_traj_poly = generate_eff_traj(contacts, self.z_offset) # Compute the", "= q.copy() self.dq_init = dq.copy() def optimize(self, init_state, contact_sequence, dynamic_sequence,", "as required for robot\\n' + 'Got %d joints but robot", "float(q[4]), float(q[5])) amom_ref = 1e-1 * se3.log((quad_goal * quad_q.inverse()).matrix()) res", "self.q_init[7 : ].T else: joint_traj_gen.joint_traj(self.q_via) for it in range(self.num_time_steps): self.joint_des[:,it]", "def __init__(self): self.z_offset = 0.1 def get_z_bound(self, mom_kin_optimizer): z_max =", "PlannerVectorParam_KinematicDefaultJointPositions, \\ PlannerIntParam_NumTimesteps, \\ PlannerDoubleParam_TimeStep class Contact(object): def __init__(self, position,", "3={x, y, z}] [1]: endeff_vel_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y,", "self.robot.num_ctrl_joints)) q[7:] = np.matrix(plan_joint_init_pos).T q[2] = self.robot.floor_height + 0.32 dq", "= np.zeros((self.num_time_steps, 3)) self.q_kin = np.zeros((self.num_time_steps, self.robot.model.nq)) self.dq_kin = np.zeros((self.num_time_steps,", "else: t = [q_via[i-1,0]/self.dt, q_via[i,0]/self.dt] poly = poly_points(t, q_via[i-1,j+1], q_via[i,j+1])", "if endeff_traj_generator is None: endeff_traj_generator = EndeffectorTrajectoryGenerator() self.endeff_traj_generator = endeff_traj_generator", "zero, assume the endeffector is in # contact with the", "self.inv_kin.ne)), 
'velocity_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)) } def fill_data_from_dynamics(self): #", "The centroidal information for it in range(self.num_time_steps): self.com_dyn[it] = self.dynamic_sequence.dynamics_states[it].com", "lmom_ref = np.zeros(3) amom_ref = np.zeros(3) endeff_pos_ref = np.array([init_state.effPosition(i) for", "range(self.max_iterations): # Adding small P controller for the base orientation", "= planner_setting.get(PlannerDoubleParam_TimeStep) self.num_time_steps = planner_setting.get(PlannerIntParam_NumTimesteps) self.max_iterations = max_iterations self.eps =", "for jump, should go to config file # q_jump =", "= self.q_init[7:] self.joint_des = np.zeros((len(self.q_init[7:]),self.num_time_steps), float) if self.q_via is None:", "print('Found initial configuration after {} iterations'.format(iters + 1)) break if", "+ 'Got %d joints but robot expects %d joints.' %", "= None # Create array with centroidal and endeffector informations.", "= 1e-2 self.reg_joint_position = 2. self.joint_des = None def reset(self):", "= endeff_traj_generator self.dt = planner_setting.get(PlannerDoubleParam_TimeStep) self.num_time_steps = planner_setting.get(PlannerIntParam_NumTimesteps) self.max_iterations =", "self.dynamic_sequence.dynamics_states[it].amom def fill_endeffector_trajectory(self): self.endeff_pos_ref, self.endeff_vel_ref, self.endeff_contact = \\ self.endeff_traj_generator(self) def", "q_via = np.matrix([.75, np.pi/2, -np.pi, np.pi/2, -np.pi, -np.pi/2, np.pi, -np.pi/2,", "np.pi]).T # q_max = np.matrix([1.35, .7*np.pi/2, -.7*np.pi, .7*np.pi/2, -.7*np.pi, -.7*np.pi/2,", "base. 
quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref = 1e-1", "# Create a constant polynominal for endeffector on the ground.", "results self.com_dyn = np.zeros((self.num_time_steps, 3)) self.lmom_dyn = np.zeros((self.num_time_steps, 3)) self.amom_dyn", "if self.q_init is None: self.optimize_initial_position(init_state) # Get the desired joint", "self.com_dyn[it] = self.dynamic_sequence.dynamics_states[it].com self.lmom_dyn[it] = self.dynamic_sequence.dynamics_states[it].lmom self.amom_dyn[it] = self.dynamic_sequence.dynamics_states[it].amom def", "for name in self.hip_names] self.eff_names = ['{}_{}'.format(eff, self.robot.joints_list[-1]) for eff", "end eff trajectories return eff_traj_poly class EndeffectorTrajectoryGenerator(object): def __init__(self): self.z_offset", "if iters == self.max_iterations - 1: print('Failed to converge for", "robot\\n' + 'Got %d joints but robot expects %d joints.'", "self.com_dyn = np.zeros((self.num_time_steps, 3)) self.lmom_dyn = np.zeros((self.num_time_steps, 3)) self.amom_dyn =", "- q[7 : ]) # joint_regularization_ref = self.reg_joint_position * (self.q_init[7", "EndeffectorTrajectoryGenerator(object): def __init__(self): self.z_offset = 0.1 def get_z_bound(self, mom_kin_optimizer): z_max", "np.zeros((num_time_steps, num_eff)) for it in range(num_time_steps): for eff, name in", "def framesVel(frames): return np.vstack([ self.inv_kin.get_world_oriented_frame_jacobian(q, idx).dot(dq)[:3] for idx in frames", "amom_ref = 1e-1 * se3.log((quad_goal * quad_q.inverse()).matrix()) res = self.inv_kin.compute(q,", "in range(self.num_time_steps): self.joint_des[:,it] = joint_traj_gen.eval_traj(it) # Compute inverse kinematics over", "= self.inv_kin.J[6:(self.inv_kin.ne + 2) * 3].dot(dq).T self.motion_eff['trajectory_wrt_base'][it] = \\ self.motion_eff['trajectory'][it]", "of joints in config file not same as required for", "JointTrajectoryGenerator() joint_traj_gen.num_time_steps = self.num_time_steps 
joint_traj_gen.q_init = self.q_init[7:] self.joint_des = np.zeros((len(self.q_init[7:]),self.num_time_steps),", "init_state self.contact_sequence = contact_sequence self.dynamic_sequence = dynamic_sequence self.q_via = None", "q, dq) dq = self.inv_kin.compute( q, dq, self.com_dyn[it], self.lmom_dyn[it], amom_ref,", "range (self.num_time_steps): self.joint_des[:,i] = self.q_init[7 : ].T else: joint_traj_gen.joint_traj(self.q_via) for", "__init__(self, position, start_time, end_time): self.pos = position self.init_time = start_time", "full trajectory. self.inv_kin.is_init_time = 0 q, dq = self.q_init.copy(), self.dq_init.copy()", "import RobotWrapper import pinocchio as se3 from pinocchio.utils import zero", "self.robot.model.nv)) self.hip_names = ['{}_HFE'.format(eff) for eff in self.robot.effs] self.hip_ids =", "eff in enumerate(effs): num_contacts = len(contact_states(i)) contacts[eff] = [] for", "from momentumopt.quadruped.quadruped_wrapper import QuadrupedWrapper from momentumopt.kinoptpy.min_jerk_traj import * from pymomentum", "on the internal array. self.com_kin[it] = self.inv_kin.robot.com(q).T self.lmom_kin[it] = hg.linear.T", "= np.zeros(3) endeff_pos_ref = np.array([init_state.effPosition(i) for i in range(init_state.effNum())]) endeff_vel_ref", "it in range(self.num_time_steps): quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) quad_q =", "range(len(self.q_init)): q[0,j] = self.poly_traj[j].eval(t) return np.matrix(q) class MomentumKinematicsOptimizer(object): def __init__(self):", "['{}_{}'.format(eff, self.robot.joints_list[-1]) for eff in self.robot.effs] self.inv_kin = PointContactInverseKinematics(self.robot.model, self.eff_names)", "%d joints.' 
% ( len(plan_joint_init_pos), self.robot.num_ctrl_joints)) q[7:] = np.matrix(plan_joint_init_pos).T q[2]", "z_offset + cnt[i].position()[idx] poly = poly_points(t, cnt[i].position()[idx], cnt[i+1].position()[idx], via) poly_traj[idx].append(t,", "self.init_time def end_time(self): return self.final_time def get_contact_plan(contact_states, effs): contacts =", "= contact_sequence self.dynamic_sequence = dynamic_sequence self.q_via = None # Create", "file # q_jump = [1., 0.1, -0.2 ,0.1, -0.2 ,-0.1,", "for it in range(self.num_time_steps): self.com_dyn[it] = self.dynamic_sequence.dynamics_states[it].com self.lmom_dyn[it] = self.dynamic_sequence.dynamics_states[it].lmom", "dq) # Storing on the internal array. self.com_kin[it] = self.inv_kin.robot.com(q).T", "joint_traj_gen.num_time_steps = self.num_time_steps joint_traj_gen.q_init = self.q_init[7:] self.joint_des = np.zeros((len(self.q_init[7:]),self.num_time_steps), float)", "Storing on the kinematic sequence. kinematic_state = self.kinematics_sequence.kinematics_states[it] kinematic_state.com =", "cnt[i+1].start_time()] for idx in range(3): via = None if idx", "np.zeros((self.num_time_steps, 3)) self.amom_kin = np.zeros((self.num_time_steps, 3)) self.q_kin = np.zeros((self.num_time_steps, self.robot.model.nq))", "not same as required for robot\\n' + 'Got %d joints", "np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'trajectory_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity_wrt_base':", "self.motion_eff['velocity_wrt_base'][it] = \\ self.motion_eff['velocity'][it] - framesVel(self.hip_ids) # Storing on the", "dynamic_sequence self.q_via = None # Create array with centroidal and", "self.final_time def get_contact_plan(contact_states, effs): contacts = {} for i, eff", "-.7*np.pi, -.7*np.pi/2, .7*np.pi, -.7*np.pi/2, .7*np.pi]).T # q_via0 = np.vstack((q_via.T, q_jump))", "= se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref = (self.reg_orientation * 
se3.log((quad_goal", "endeff_traj_generator = EndeffectorTrajectoryGenerator() self.endeff_traj_generator = endeff_traj_generator self.dt = planner_setting.get(PlannerDoubleParam_TimeStep) self.num_time_steps", "= 1. else: endeff_contact[it][eff] = 0. return endeff_pos_ref, endeff_vel_ref, endeff_contact", "MomentumKinematicsOptimizer(object): def __init__(self): self.q_init = None self.dq_init = None self.reg_orientation", "self.dynamic_sequence = dynamic_sequence self.q_via = None # Create array with", "dt = mom_kin_optimizer.dt num_eff = len(mom_kin_optimizer.eff_names) num_time_steps = mom_kin_optimizer.num_time_steps contacts", "1e-2 self.reg_joint_position = 2. self.joint_des = None def reset(self): self.kinematics_sequence", "= [q_via[i-1,0]/self.dt, q_via[i,0]/self.dt] poly = poly_points(t, q_via[i-1,j+1], q_via[i,j+1]) self.poly_traj[j].append(t, poly)", "2019-10-08 ''' import os import numpy as np from momentumopt.kinoptpy.qp", "def framesPos(frames): return np.vstack([data.oMf[idx].translation for idx in frames]).reshape(-1) def framesVel(frames):", "q, dq = self.q_init.copy(), self.dq_init.copy() for it in range(self.num_time_steps): quad_goal", "for i in range(3)] # HACK: If the velocity is", "idx in range(3): poly_traj[idx].append(t, constant_poly(cnt[i].position()[idx])) # If there is a", "oriented base. quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref =", "= {} for eff in effs: cnt = contacts[eff] num_contacts", "if np.all(endeff_vel_ref[it][eff] == 0.): endeff_contact[it][eff] = 1. 
else: endeff_contact[it][eff] =", "self.endeff_pos_ref[it], self.endeff_vel_ref[it], self.endeff_contact[it], joint_regularization_ref) # Integrate to the next state.", "1: t = [cnt[i].end_time(), cnt[i+1].start_time()] for idx in range(3): via", "amom_ref = np.zeros(3) endeff_pos_ref = np.array([init_state.effPosition(i) for i in range(init_state.effNum())])", "poly_points(t, cnt[i].position()[idx], cnt[i+1].position()[idx], via) poly_traj[idx].append(t, poly) eff_traj_poly[eff] = poly_traj #", "optimization for the initial configuration only once. if self.q_init is", "% ( len(plan_joint_init_pos), self.robot.num_ctrl_joints)) q[7:] = np.matrix(plan_joint_init_pos).T q[2] = self.robot.floor_height", "else: endeff_contact[it][eff] = 0. return endeff_pos_ref, endeff_vel_ref, endeff_contact class JointTrajectoryGenerator(object):", "np.zeros((len(self.q_init[7:]),self.num_time_steps), float) if self.q_via is None: for i in range", "self.joint_des[:,it] = joint_traj_gen.eval_traj(it) # Compute inverse kinematics over the full", "3)) self.amom_kin = np.zeros((self.num_time_steps, 3)) self.q_kin = np.zeros((self.num_time_steps, self.robot.model.nq)) self.dq_kin", "joint_regularization_ref = self.reg_joint_position * (np.matrix(self.joint_des[:,it]).T - q[7 : ]) #", "as np from momentumopt.kinoptpy.qp import QpSolver from momentumopt.kinoptpy.inverse_kinematics import PointContactInverseKinematics", "''' import os import numpy as np from momentumopt.kinoptpy.qp import", "cnt[i].position()[idx] poly = poly_points(t, cnt[i].position()[idx], cnt[i+1].position()[idx], via) poly_traj[idx].append(t, poly) eff_traj_poly[eff]", "self.com_kin = np.zeros((self.num_time_steps, 3)) self.lmom_kin = np.zeros((self.num_time_steps, 3)) self.amom_kin =", "self.dq_kin[it] = dq.T # The endeffector informations as well. 
self.motion_eff['trajectory'][it]", "np.pi, -np.pi/2, np.pi]).T # q_max = np.matrix([1.35, .7*np.pi/2, -.7*np.pi, .7*np.pi/2,", "# self.q_via = np.vstack((q_via0, q_max.T)) joint_traj_gen = JointTrajectoryGenerator() joint_traj_gen.num_time_steps =", "trajectory. self.inv_kin.is_init_time = 0 q, dq = self.q_init.copy(), self.dq_init.copy() for", "] - q[7 : ]) # Fill the kinematics results", "Computes the endeffector positions and velocities. Returns endeff_pos_ref, endeff_vel_ref [0]:", "in config file not same as required for robot\\n' +", "joints in config file not same as required for robot\\n'", "np.zeros((num_time_steps, num_eff, 3)) endeff_contact = np.zeros((num_time_steps, num_eff)) for it in", "= None def joint_traj(self, q_via): self.poly_traj = [] for i", "= init_state.com lmom_ref = np.zeros(3) amom_ref = np.zeros(3) endeff_pos_ref =", "self.endeff_traj_generator = endeff_traj_generator self.dt = planner_setting.get(PlannerDoubleParam_TimeStep) self.num_time_steps = planner_setting.get(PlannerIntParam_NumTimesteps) self.max_iterations", "kinematic_state.robot_posture.base_orientation = q[3:7] kinematic_state.robot_posture.joint_positions = q[7:] kinematic_state.robot_velocity.base_linear_velocity = dq[:3] kinematic_state.robot_velocity.base_angular_velocity", "3)) endeff_vel_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_contact = np.zeros((num_time_steps, num_eff))", "se3.integrate(self.robot.model, q, res) if np.linalg.norm(res) < 1e-3: print('Found initial configuration", "contact_sequence, dynamic_sequence, plotting=False): self.init_state = init_state self.contact_sequence = contact_sequence self.dynamic_sequence", "the endeffector is in # contact with the ground. if", "over the full trajectory. 
self.inv_kin.is_init_time = 0 q, dq =", "amom_ref, endeff_pos_ref, endeff_vel_ref, endeff_contact, None) q = se3.integrate(self.robot.model, q, res)", "i in range (self.num_time_steps): self.joint_des[:,i] = self.q_init[7 : ].T else:", "to config file # q_jump = [1., 0.1, -0.2 ,0.1,", "for i in range (self.num_time_steps): self.joint_des[:,i] = self.q_init[7 : ].T", "momentum_kinematics_optimizer.py @package momentumopt @author <NAME> (<EMAIL>) @license License BSD-3-Clause @copyright", "= poly_points(t, q_via[i-1,j+1], q_via[i,j+1]) self.poly_traj[j].append(t, poly) def eval_traj(self,t): q =", "np.zeros((self.num_time_steps, self.robot.model.nv)) self.hip_names = ['{}_HFE'.format(eff) for eff in self.robot.effs] self.hip_ids", "self.q_init = q.copy() self.dq_init = dq.copy() def optimize(self, init_state, contact_sequence,", "3={x, y, z}] ''' dt = mom_kin_optimizer.dt num_eff = len(mom_kin_optimizer.eff_names)", "[ PolynominalList(), PolynominalList(), PolynominalList() ] for i in range(num_contacts): #", "np.zeros((self.num_time_steps, self.robot.model.nq)) self.dq_kin = np.zeros((self.num_time_steps, self.robot.model.nv)) self.hip_names = ['{}_HFE'.format(eff) for", "contacts[eff] = [] for j in range(num_contacts): contact_ = contact_states(i)[j]", "np.matrix(np.zeros((init_state.effNum(), 3))) endeff_contact = np.ones(init_state.effNum()) quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T))", "-np.pi, -np.pi/2, np.pi, -np.pi/2, np.pi]).T # q_max = np.matrix([1.35, .7*np.pi/2,", "kinematics results self.com_dyn = np.zeros((self.num_time_steps, 3)) self.lmom_dyn = np.zeros((self.num_time_steps, 3))", "-np.pi/2, np.pi]).T # q_max = np.matrix([1.35, .7*np.pi/2, -.7*np.pi, .7*np.pi/2, -.7*np.pi,", "dq) dq = self.inv_kin.compute( q, dq, self.com_dyn[it], self.lmom_dyn[it], amom_ref, self.endeff_pos_ref[it],", "effs): contacts = {} for i, eff in enumerate(effs): num_contacts", "= q[:3] kinematic_state.robot_posture.base_orientation = q[3:7] 
kinematic_state.robot_posture.joint_positions = q[7:] kinematic_state.robot_velocity.base_linear_velocity =", "required for robot\\n' + 'Got %d joints but robot expects", "z_offset): effs = contacts.keys() eff_traj_poly = {} for eff in", "amom_ref, self.endeff_pos_ref[it], self.endeff_vel_ref[it], self.endeff_contact[it], joint_regularization_ref) # Integrate to the next", "return np.vstack([ self.inv_kin.get_world_oriented_frame_jacobian(q, idx).dot(dq)[:3] for idx in frames ]).reshape(-1) data", "eff_traj_poly class EndeffectorTrajectoryGenerator(object): def __init__(self): self.z_offset = 0.1 def get_z_bound(self,", "poly_traj = [ PolynominalList(), PolynominalList(), PolynominalList() ] for i in", "i in range(3)] endeff_vel_ref[it][eff] = [eff_traj_poly[name][i].deval(it * dt) for i", "and endeffector informations. self.fill_data_from_dynamics() self.fill_endeffector_trajectory() # Run the optimization for", "is None: for i in range (self.num_time_steps): self.joint_des[:,i] = self.q_init[7", "self.endeff_vel_ref[it], self.endeff_contact[it], joint_regularization_ref) # Integrate to the next state. q", "np.append(self.poly_traj, [PolynominalList()]) for j in range(len(self.q_init)): for i in range", "Copyright (c) 2019, New York University and Max Planck Gesellschaft.", "* 3].dot(dq).T self.motion_eff['trajectory_wrt_base'][it] = \\ self.motion_eff['trajectory'][it] - framesPos(self.hip_ids) self.motion_eff['velocity_wrt_base'][it] =", "poly = poly_points(t, q_via[i-1,j+1], self.q_init[j]) self.poly_traj[j].append(t, poly) else: t =", "endeff_contact[it][eff] = 1. else: endeff_contact[it][eff] = 0. 
return endeff_pos_ref, endeff_vel_ref,", "def optimize_initial_position(self, init_state): # Optimize the initial configuration q =", "range(num_contacts): # Create a constant polynominal for endeffector on the", "= self.kinematics_sequence.kinematics_states[it] kinematic_state.com = self.com_kin[it] kinematic_state.lmom = self.lmom_kin[it] kinematic_state.amom =", "< num_contacts - 1: t = [cnt[i].end_time(), cnt[i+1].start_time()] for idx", "se3.log((quad_goal * quad_q.inverse()).matrix()) res = self.inv_kin.compute(q, dq, com_ref, lmom_ref, amom_ref,", "self.lmom_dyn[it], amom_ref, self.endeff_pos_ref[it], self.endeff_vel_ref[it], self.endeff_contact[it], joint_regularization_ref) # Integrate to the", "np.vstack((q_via0, q_max.T)) joint_traj_gen = JointTrajectoryGenerator() joint_traj_gen.num_time_steps = self.num_time_steps joint_traj_gen.q_init =", "self.reset() # Holds dynamics and kinematics results self.com_dyn = np.zeros((self.num_time_steps,", "assume the endeffector is in # contact with the ground.", "self.hip_ids = [self.robot.model.getFrameId(name) for name in self.hip_names] self.eff_names = ['{}_{}'.format(eff,", "it in range(self.num_time_steps): self.joint_des[:,it] = joint_traj_gen.eval_traj(it) # Compute inverse kinematics", "= start_time self.final_time = end_time def position(self): return self.pos def", "= poly_points(t, self.q_init[j], q_via[i,j+1]) self.poly_traj[j].append(t, poly) elif(i==len(q_via[:,0])): t = [q_via[i-1,0]/self.dt,", "dq[6:] def optimize_initial_position(self, init_state): # Optimize the initial configuration q", "< 1e-3: print('Found initial configuration after {} iterations'.format(iters + 1))", "Planck Gesellschaft. 
@date 2019-10-08 ''' import os import numpy as", "= max(min(mom_kin_optimizer.com_dyn[:, 2]), self.min_bound) return z_max, z_min def __call__(self, mom_kin_optimizer):", ": ].T else: joint_traj_gen.joint_traj(self.q_via) for it in range(self.num_time_steps): self.joint_des[:,it] =", "np.zeros(3) amom_ref = np.zeros(3) endeff_pos_ref = np.array([init_state.effPosition(i) for i in", "transition between # the two contact points. if i <", "self.q_init.copy(), self.dq_init.copy() for it in range(self.num_time_steps): quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0,", "self.endeff_contact = \\ self.endeff_traj_generator(self) def fill_kinematic_result(self, it, q, dq): def", "import QpSolver from momentumopt.kinoptpy.inverse_kinematics import PointContactInverseKinematics from pinocchio import RobotWrapper", "np.zeros((self.num_time_steps, 3)) self.lmom_dyn = np.zeros((self.num_time_steps, 3)) self.amom_dyn = np.zeros((self.num_time_steps, 3))", "it in range(self.num_time_steps): self.com_dyn[it] = self.dynamic_sequence.dynamics_states[it].com self.lmom_dyn[it] = self.dynamic_sequence.dynamics_states[it].lmom self.amom_dyn[it]", "self.max_iterations - 1: print('Failed to converge for initial setup.') print(\"initial", "z_min def __call__(self, mom_kin_optimizer): ''' Computes the endeffector positions and", "z}] ''' dt = mom_kin_optimizer.dt num_eff = len(mom_kin_optimizer.eff_names) num_time_steps =", "np.zeros((self.num_time_steps, 3)) self.amom_dyn = np.zeros((self.num_time_steps, 3)) self.com_kin = np.zeros((self.num_time_steps, 3))", "num_eff, 3={x, y, z}] ''' dt = mom_kin_optimizer.dt num_eff =", "3))) endeff_contact = np.ones(init_state.effNum()) quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) q[3:7]", "# Adding small P controller for the base orientation to", "range(num_contacts): contact_ = contact_states(i)[j] start_time = contact_.start_time end_time = contact_.end_time", "3)) self.lmom_dyn = 
np.zeros((self.num_time_steps, 3)) self.amom_dyn = np.zeros((self.num_time_steps, 3)) self.com_kin", "planner_setting.get(PlannerIntParam_NumTimesteps) self.max_iterations = max_iterations self.eps = eps self.robot = RobotWrapper()", "configuration: \\n\", q) self.q_init = q.copy() self.dq_init = dq.copy() def", "np.zeros((self.num_time_steps, 3)) self.q_kin = np.zeros((self.num_time_steps, self.robot.model.nq)) self.dq_kin = np.zeros((self.num_time_steps, self.robot.model.nv))", "joints but robot expects %d joints.' % ( len(plan_joint_init_pos), self.robot.num_ctrl_joints))", "2]), self.max_bound) z_min = max(min(mom_kin_optimizer.com_dyn[:, 2]), self.min_bound) return z_max, z_min", "in self.hip_names] self.eff_names = ['{}_{}'.format(eff, self.robot.joints_list[-1]) for eff in self.robot.effs]", "self.poly_traj[j].append(t, poly) def eval_traj(self,t): q = np.zeros((1,len(self.q_init)),float) for j in", "+ 0.32 dq = np.matrix(np.zeros(self.robot.robot.nv)).T com_ref = init_state.com lmom_ref =", "inverse kinematics over the full trajectory. self.inv_kin.is_init_time = 0 q,", "num_contacts - 1: t = [cnt[i].end_time(), cnt[i+1].start_time()] for idx in", "np.zeros((self.num_time_steps, 3)) self.com_kin = np.zeros((self.num_time_steps, 3)) self.lmom_kin = np.zeros((self.num_time_steps, 3))", "= 0. 
return endeff_pos_ref, endeff_vel_ref, endeff_contact class JointTrajectoryGenerator(object): def __init__(self):", "= hg.angular.T self.q_kin[it] = q.T self.dq_kin[it] = dq.T # The", "self.inv_kin.ne)), 'trajectory_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity_wrt_base': np.zeros((self.num_time_steps, 3 *", "q, res) if np.linalg.norm(res) < 1e-3: print('Found initial configuration after", "np.zeros(3) endeff_pos_ref = np.array([init_state.effPosition(i) for i in range(init_state.effNum())]) endeff_vel_ref =", "self.joint_des = np.zeros((len(self.q_init[7:]),self.num_time_steps), float) if self.q_via is None: for i", "# Compute inverse kinematics over the full trajectory. self.inv_kin.is_init_time =", "None if idx == 2: via = z_offset + cnt[i].position()[idx]", "frames]).reshape(-1) def framesVel(frames): return np.vstack([ self.inv_kin.get_world_oriented_frame_jacobian(q, idx).dot(dq)[:3] for idx in", "if i < num_contacts - 1: t = [cnt[i].end_time(), cnt[i+1].start_time()]", "self.reg_joint_position = 2. self.joint_des = None def reset(self): self.kinematics_sequence =", "pymomentum import \\ PlannerVectorParam_KinematicDefaultJointPositions, \\ PlannerIntParam_NumTimesteps, \\ PlannerDoubleParam_TimeStep class Contact(object):", "endeff_vel_ref[it][eff] = [eff_traj_poly[name][i].deval(it * dt) for i in range(3)] #", "poly) else: t = [q_via[i-1,0]/self.dt, q_via[i,0]/self.dt] poly = poly_points(t, q_via[i-1,j+1],", "quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref = 1e-1 *", "self.dq_init = dq.copy() def optimize(self, init_state, contact_sequence, dynamic_sequence, plotting=False): self.init_state", "config file # q_jump = [1., 0.1, -0.2 ,0.1, -0.2", "PlannerVectorParam_KinematicDefaultJointPositions) if len(plan_joint_init_pos) != self.robot.num_ctrl_joints: raise ValueError( 'Number of joints", "University and Max Planck Gesellschaft. 
@date 2019-10-08 ''' import os", "quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]),", "range(self.num_time_steps): quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) quad_q = se3.Quaternion(float(q[6]), float(q[3]),", "Integrate to the next state. q = se3.integrate(self.robot.model, q, dq", "np.vstack([data.oMf[idx].translation for idx in frames]).reshape(-1) def framesVel(frames): return np.vstack([ self.inv_kin.get_world_oriented_frame_jacobian(q,", "contacts[eff].append(Contact(position, start_time, end_time)) return contacts def generate_eff_traj(contacts, z_offset): effs =", "shape=[num_time_steps, num_eff, 3={x, y, z}] ''' dt = mom_kin_optimizer.dt num_eff", "eval_traj(self,t): q = np.zeros((1,len(self.q_init)),float) for j in range(len(self.q_init)): q[0,j] =", "for the base orientation to always start with flat #", "points. if i < num_contacts - 1: t = [cnt[i].end_time(),", "array. self.com_kin[it] = self.inv_kin.robot.com(q).T self.lmom_kin[it] = hg.linear.T self.amom_kin[it] = hg.angular.T", "always start with flat # oriented base. quad_q = se3.Quaternion(float(q[6]),", "]) # Fill the kinematics results for it. self.inv_kin.forward_robot(q, dq)", "Max Planck Gesellschaft. 
@date 2019-10-08 ''' import os import numpy", "{} iterations'.format(iters + 1)) break if iters == self.max_iterations -", "poly) eff_traj_poly[eff] = poly_traj # returns end eff trajectories return", "information for it in range(self.num_time_steps): self.com_dyn[it] = self.dynamic_sequence.dynamics_states[it].com self.lmom_dyn[it] =", "q_via): self.poly_traj = [] for i in range(len(self.q_init)): self.poly_traj =", "is None: self.optimize_initial_position(init_state) # Get the desired joint trajectory #", "# joint_regularization_ref = self.reg_joint_position * (self.q_init[7 : ] - q[7", "poly_points(t, q_via[i-1,j+1], q_via[i,j+1]) self.poly_traj[j].append(t, poly) def eval_traj(self,t): q = np.zeros((1,len(self.q_init)),float)", "2: via = z_offset + cnt[i].position()[idx] poly = poly_points(t, cnt[i].position()[idx],", "- 1: t = [cnt[i].end_time(), cnt[i+1].start_time()] for idx in range(3):", "if len(plan_joint_init_pos) != self.robot.num_ctrl_joints: raise ValueError( 'Number of joints in", "np.all(endeff_vel_ref[it][eff] == 0.): endeff_contact[it][eff] = 1. 
else: endeff_contact[it][eff] = 0.", "the desired joint trajectory # print \"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints) # print \"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints)", "Optimize the initial configuration q = se3.neutral(self.robot.model) plan_joint_init_pos = self.planner_setting.get(", "np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)) } def fill_data_from_dynamics(self): # The centroidal", "= self.robot.floor_height + 0.32 dq = np.matrix(np.zeros(self.robot.robot.nv)).T com_ref = init_state.com", "# Run the optimization for the initial configuration only once.", "<NAME> (<EMAIL>) @license License BSD-3-Clause @copyright Copyright (c) 2019, New", "np.vstack([ self.inv_kin.get_world_oriented_frame_jacobian(q, idx).dot(dq)[:3] for idx in frames ]).reshape(-1) data =", "(self.q_init[7 : ] - q[7 : ]) # Fill the", "self.lmom_kin[it] kinematic_state.amom = self.amom_kin[it] kinematic_state.robot_posture.base_position = q[:3] kinematic_state.robot_posture.base_orientation = q[3:7]", "j in range(num_contacts): contact_ = contact_states(i)[j] start_time = contact_.start_time end_time", "self.amom_kin[it] kinematic_state.robot_posture.base_position = q[:3] kinematic_state.robot_posture.base_orientation = q[3:7] kinematic_state.robot_posture.joint_positions = q[7:]", "contact following, add the transition between # the two contact", "position = contact_.position contacts[eff].append(Contact(position, start_time, end_time)) return contacts def generate_eff_traj(contacts,", "eff trajectories return eff_traj_poly class EndeffectorTrajectoryGenerator(object): def __init__(self): self.z_offset =", "results for it. 
self.inv_kin.forward_robot(q, dq) self.fill_kinematic_result(it, q, dq) dq =", "dq, self.com_dyn[it], self.lmom_dyn[it], amom_ref, self.endeff_pos_ref[it], self.endeff_vel_ref[it], self.endeff_contact[it], joint_regularization_ref) # Integrate", "import numpy as np from momentumopt.kinoptpy.qp import QpSolver from momentumopt.kinoptpy.inverse_kinematics", "contacts def generate_eff_traj(contacts, z_offset): effs = contacts.keys() eff_traj_poly = {}", "se3.log((quad_goal * quad_q.inverse()).matrix()).T + self.amom_dyn[it]).reshape(-1) joint_regularization_ref = self.reg_joint_position * (np.matrix(self.joint_des[:,it]).T", "= \\ self.motion_eff['trajectory'][it] - framesPos(self.hip_ids) self.motion_eff['velocity_wrt_base'][it] = \\ self.motion_eff['velocity'][it] -", "self.kinematics_sequence.kinematics_states[it] kinematic_state.com = self.com_kin[it] kinematic_state.lmom = self.lmom_kin[it] kinematic_state.amom = self.amom_kin[it]", "range(3): via = None if idx == 2: via =", "configuration q = se3.neutral(self.robot.model) plan_joint_init_pos = self.planner_setting.get( PlannerVectorParam_KinematicDefaultJointPositions) if len(plan_joint_init_pos)", "[eff_traj_poly[name][i].deval(it * dt) for i in range(3)] # HACK: If", "contact points. 
if i < num_contacts - 1: t =", "framesPos(frames): return np.vstack([data.oMf[idx].translation for idx in frames]).reshape(-1) def framesVel(frames): return", "'Number of joints in config file not same as required", "framesPos(self.hip_ids) self.motion_eff['velocity_wrt_base'][it] = \\ self.motion_eff['velocity'][it] - framesVel(self.hip_ids) # Storing on", "np.matrix(np.zeros(self.robot.robot.nv)).T com_ref = init_state.com lmom_ref = np.zeros(3) amom_ref = np.zeros(3)", "contact_.start_time end_time = contact_.end_time position = contact_.position contacts[eff].append(Contact(position, start_time, end_time))", "self.dynamic_sequence.dynamics_states[it].lmom self.amom_dyn[it] = self.dynamic_sequence.dynamics_states[it].amom def fill_endeffector_trajectory(self): self.endeff_pos_ref, self.endeff_vel_ref, self.endeff_contact =", "= np.zeros((self.num_time_steps, 3)) self.lmom_kin = np.zeros((self.num_time_steps, 3)) self.amom_kin = np.zeros((self.num_time_steps,", "= dynamic_sequence self.q_via = None # Create array with centroidal", "for eff in self.robot.effs] self.hip_ids = [self.robot.model.getFrameId(name) for name in", "1e-3: print('Found initial configuration after {} iterations'.format(iters + 1)) break", "dq = self.inv_kin.compute( q, dq, self.com_dyn[it], self.lmom_dyn[it], amom_ref, self.endeff_pos_ref[it], self.endeff_vel_ref[it],", "contact with the ground. if np.all(endeff_vel_ref[it][eff] == 0.): endeff_contact[it][eff] =", "only once. if self.q_init is None: self.optimize_initial_position(init_state) # Get the", "(c) 2019, New York University and Max Planck Gesellschaft. @date", "for j in range(len(self.q_init)): q[0,j] = self.poly_traj[j].eval(t) return np.matrix(q) class", "once. 
if self.q_init is None: self.optimize_initial_position(init_state) # Get the desired", "-0.2 ,0.1, -0.2 ,-0.1, 0.2 ,-0.1, 0.2] # q_via =", "for it in range(num_time_steps): for eff, name in enumerate(mom_kin_optimizer.eff_names): endeff_pos_ref[it][eff]", "-np.pi/2, np.pi, -np.pi/2, np.pi]).T # q_max = np.matrix([1.35, .7*np.pi/2, -.7*np.pi,", "range(3): poly_traj[idx].append(t, constant_poly(cnt[i].position()[idx])) # If there is a contact following,", "= self.inv_kin.compute( q, dq, self.com_dyn[it], self.lmom_dyn[it], amom_ref, self.endeff_pos_ref[it], self.endeff_vel_ref[it], self.endeff_contact[it],", "endeff_vel_ref, endeff_contact, None) q = se3.integrate(self.robot.model, q, res) if np.linalg.norm(res)", "joint_regularization_ref) # Integrate to the next state. q = se3.integrate(self.robot.model,", "in range(num_contacts): # Create a constant polynominal for endeffector on", "endeff_contact, None) q = se3.integrate(self.robot.model, q, res) if np.linalg.norm(res) <", "= np.zeros((self.num_time_steps, self.robot.model.nv)) self.hip_names = ['{}_HFE'.format(eff) for eff in self.robot.effs]", "self.motion_eff['trajectory'][it] = framesPos(self.inv_kin.endeff_ids) self.motion_eff['velocity'][it] = self.inv_kin.J[6:(self.inv_kin.ne + 2) * 3].dot(dq).T", "endeff_pos_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y, z}] [1]: endeff_vel_ref: np.array,", "= end_time def position(self): return self.pos def start_time(self): return self.init_time", "via = z_offset + cnt[i].position()[idx] poly = poly_points(t, cnt[i].position()[idx], cnt[i+1].position()[idx],", "3 * self.inv_kin.ne)) } def fill_data_from_dynamics(self): # The centroidal information", "range(3)] endeff_vel_ref[it][eff] = [eff_traj_poly[name][i].deval(it * dt) for i in range(3)]", "None: endeff_traj_generator = EndeffectorTrajectoryGenerator() self.endeff_traj_generator = endeff_traj_generator self.dt = planner_setting.get(PlannerDoubleParam_TimeStep)", "to the next state. 
q = se3.integrate(self.robot.model, q, dq *", "pinocchio import RobotWrapper import pinocchio as se3 from pinocchio.utils import", "# oriented base. quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref", "for it. self.inv_kin.forward_robot(q, dq) self.fill_kinematic_result(it, q, dq) dq = self.inv_kin.compute(", "self.poly_traj[j].append(t, poly) else: t = [q_via[i-1,0]/self.dt, q_via[i,0]/self.dt] poly = poly_points(t,", "float(q[4]), float(q[5])) amom_ref = (self.reg_orientation * se3.log((quad_goal * quad_q.inverse()).matrix()).T +", "JointTrajectoryGenerator(object): def __init__(self): self.dt =.01 self.num_time_steps = None self.q_init =", "initial configuration q = se3.neutral(self.robot.model) plan_joint_init_pos = self.planner_setting.get( PlannerVectorParam_KinematicDefaultJointPositions) if", "joints.' % ( len(plan_joint_init_pos), self.robot.num_ctrl_joints)) q[7:] = np.matrix(plan_joint_init_pos).T q[2] =", "-.7*np.pi/2, .7*np.pi]).T # q_via0 = np.vstack((q_via.T, q_jump)) # self.q_via =", "end_time)) return contacts def generate_eff_traj(contacts, z_offset): effs = contacts.keys() eff_traj_poly", "np.matrix(plan_joint_init_pos).T q[2] = self.robot.floor_height + 0.32 dq = np.matrix(np.zeros(self.robot.robot.nv)).T com_ref", "endeffector positions and velocities. 
Returns endeff_pos_ref, endeff_vel_ref [0]: endeff_pos_ref: np.array,", "for it in range(self.num_time_steps): self.joint_des[:,it] = joint_traj_gen.eval_traj(it) # Compute inverse", "# TODO: this is for jump, should go to config", "self.lmom_kin = np.zeros((self.num_time_steps, 3)) self.amom_kin = np.zeros((self.num_time_steps, 3)) self.q_kin =", "for idx in frames]).reshape(-1) def framesVel(frames): return np.vstack([ self.inv_kin.get_world_oriented_frame_jacobian(q, idx).dot(dq)[:3]", "None: for i in range (self.num_time_steps): self.joint_des[:,i] = self.q_init[7 :", "import QuadrupedWrapper from momentumopt.kinoptpy.min_jerk_traj import * from pymomentum import \\", "eff in self.robot.effs] self.inv_kin = PointContactInverseKinematics(self.robot.model, self.eff_names) self.motion_eff = {", "= np.ones(init_state.effNum()) quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) q[3:7] = quad_goal.coeffs()", "in range(num_time_steps): for eff, name in enumerate(mom_kin_optimizer.eff_names): endeff_pos_ref[it][eff] = [eff_traj_poly[name][i].eval(it", "eff, name in enumerate(mom_kin_optimizer.eff_names): endeff_pos_ref[it][eff] = [eff_traj_poly[name][i].eval(it * dt) for", "self.endeff_vel_ref, self.endeff_contact = \\ self.endeff_traj_generator(self) def fill_kinematic_result(self, it, q, dq):", "mom_kin_optimizer.num_time_steps contacts = get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states, mom_kin_optimizer.eff_names) # Generate minimum jerk trajectories", "[PolynominalList()]) for j in range(len(self.q_init)): for i in range (len(q_via[:,0])+1):", "print \"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints) # TODO: this is for jump, should go", "= self.reg_joint_position * (np.matrix(self.joint_des[:,it]).T - q[7 : ]) # joint_regularization_ref", "the full trajectory. self.inv_kin.is_init_time = 0 q, dq = self.q_init.copy(),", "centroidal and endeffector informations. 
self.fill_data_from_dynamics() self.fill_endeffector_trajectory() # Run the optimization", "hg.linear.T self.amom_kin[it] = hg.angular.T self.q_kin[it] = q.T self.dq_kin[it] = dq.T", "in range(num_contacts): contact_ = contact_states(i)[j] start_time = contact_.start_time end_time =", "Contact(object): def __init__(self, position, start_time, end_time): self.pos = position self.init_time", "in range(len(self.q_init)): q[0,j] = self.poly_traj[j].eval(t) return np.matrix(q) class MomentumKinematicsOptimizer(object): def", "q_jump = [1., 0.1, -0.2 ,0.1, -0.2 ,-0.1, 0.2 ,-0.1,", "self.q_via is None: for i in range (self.num_time_steps): self.joint_des[:,i] =", "y, z}] ''' dt = mom_kin_optimizer.dt num_eff = len(mom_kin_optimizer.eff_names) num_time_steps", "self.q_init[j], q_via[i,j+1]) self.poly_traj[j].append(t, poly) elif(i==len(q_via[:,0])): t = [q_via[i-1,0]/self.dt, self.num_time_steps] poly", "kinematic sequence. kinematic_state = self.kinematics_sequence.kinematics_states[it] kinematic_state.com = self.com_kin[it] kinematic_state.lmom =", "= [1., 0.1, -0.2 ,0.1, -0.2 ,-0.1, 0.2 ,-0.1, 0.2]", "{} for i, eff in enumerate(effs): num_contacts = len(contact_states(i)) contacts[eff]", "self.com_kin[it] = self.inv_kin.robot.com(q).T self.lmom_kin[it] = hg.linear.T self.amom_kin[it] = hg.angular.T self.q_kin[it]", "== self.max_iterations - 1: print('Failed to converge for initial setup.')", "plotting=False): self.init_state = init_state self.contact_sequence = contact_sequence self.dynamic_sequence = dynamic_sequence", "q_via[i,0]/self.dt] poly = poly_points(t, q_via[i-1,j+1], q_via[i,j+1]) self.poly_traj[j].append(t, poly) def eval_traj(self,t):", "} def fill_data_from_dynamics(self): # The centroidal information for it in", "dq): def framesPos(frames): return np.vstack([data.oMf[idx].translation for idx in frames]).reshape(-1) def", "endeff_vel_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_contact = np.zeros((num_time_steps, num_eff)) for", "plan_joint_init_pos 
= self.planner_setting.get( PlannerVectorParam_KinematicDefaultJointPositions) if len(plan_joint_init_pos) != self.robot.num_ctrl_joints: raise ValueError(", "= [0, q_via[0,0]/self.dt] poly = poly_points(t, self.q_init[j], q_via[i,j+1]) self.poly_traj[j].append(t, poly)", "momentumopt @author <NAME> (<EMAIL>) @license License BSD-3-Clause @copyright Copyright (c)", "self.poly_traj = np.append(self.poly_traj, [PolynominalList()]) for j in range(len(self.q_init)): for i", "\\ PlannerVectorParam_KinematicDefaultJointPositions, \\ PlannerIntParam_NumTimesteps, \\ PlannerDoubleParam_TimeStep class Contact(object): def __init__(self,", "start_time(self): return self.init_time def end_time(self): return self.final_time def get_contact_plan(contact_states, effs):", "endeff_vel_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y, z}] ''' dt =", "= \\ self.motion_eff['velocity'][it] - framesVel(self.hip_ids) # Storing on the kinematic", "self.poly_traj[j].append(t, poly) elif(i==len(q_via[:,0])): t = [q_via[i-1,0]/self.dt, self.num_time_steps] poly = poly_points(t,", "= mom_kin_optimizer.num_time_steps contacts = get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states, mom_kin_optimizer.eff_names) # Generate minimum jerk", "trajectories return eff_traj_poly class EndeffectorTrajectoryGenerator(object): def __init__(self): self.z_offset = 0.1", "q[7:] = np.matrix(plan_joint_init_pos).T q[2] = self.robot.floor_height + 0.32 dq =", "in # contact with the ground. if np.all(endeff_vel_ref[it][eff] == 0.):", "self.final_time = end_time def position(self): return self.pos def start_time(self): return", "to converge for initial setup.') print(\"initial configuration: \\n\", q) self.q_init", "os import numpy as np from momentumopt.kinoptpy.qp import QpSolver from", "is in # contact with the ground. 
if np.all(endeff_vel_ref[it][eff] ==", "\\ self.motion_eff['trajectory'][it] - framesPos(self.hip_ids) self.motion_eff['velocity_wrt_base'][it] = \\ self.motion_eff['velocity'][it] - framesVel(self.hip_ids)", "raise ValueError( 'Number of joints in config file not same", "q[7 : ]) # joint_regularization_ref = self.reg_joint_position * (self.q_init[7 :", "= self.dynamic_sequence.dynamics_states[it].lmom self.amom_dyn[it] = self.dynamic_sequence.dynamics_states[it].amom def fill_endeffector_trajectory(self): self.endeff_pos_ref, self.endeff_vel_ref, self.endeff_contact", "= contacts[eff] num_contacts = len(cnt) poly_traj = [ PolynominalList(), PolynominalList(),", "contacts[eff] num_contacts = len(cnt) poly_traj = [ PolynominalList(), PolynominalList(), PolynominalList()", "q, dq, self.com_dyn[it], self.lmom_dyn[it], amom_ref, self.endeff_pos_ref[it], self.endeff_vel_ref[it], self.endeff_contact[it], joint_regularization_ref) #", "= np.zeros((self.num_time_steps, self.robot.model.nq)) self.dq_kin = np.zeros((self.num_time_steps, self.robot.model.nv)) self.hip_names = ['{}_HFE'.format(eff)", "= self.inv_kin.robot.data hg = self.inv_kin.robot.centroidalMomentum(q, dq) # Storing on the", "0. return endeff_pos_ref, endeff_vel_ref, endeff_contact class JointTrajectoryGenerator(object): def __init__(self): self.dt", "via = None if idx == 2: via = z_offset", "= self.com_kin[it] kinematic_state.lmom = self.lmom_kin[it] kinematic_state.amom = self.amom_kin[it] kinematic_state.robot_posture.base_position =", "= self.inv_kin.robot.centroidalMomentum(q, dq) # Storing on the internal array. 
self.com_kin[it]", "3)) self.q_kin = np.zeros((self.num_time_steps, self.robot.model.nq)) self.dq_kin = np.zeros((self.num_time_steps, self.robot.model.nv)) self.hip_names", "import pinocchio as se3 from pinocchio.utils import zero from pymomentum", "return endeff_pos_ref, endeff_vel_ref, endeff_contact class JointTrajectoryGenerator(object): def __init__(self): self.dt =.01", "* dt) for i in range(3)] endeff_vel_ref[it][eff] = [eff_traj_poly[name][i].deval(it *", "for iters in range(self.max_iterations): # Adding small P controller for", "= np.zeros((num_time_steps, num_eff)) for it in range(num_time_steps): for eff, name", "# q_via0 = np.vstack((q_via.T, q_jump)) # self.q_via = np.vstack((q_via0, q_max.T))", "self.motion_eff['velocity'][it] = self.inv_kin.J[6:(self.inv_kin.ne + 2) * 3].dot(dq).T self.motion_eff['trajectory_wrt_base'][it] = \\", "@license License BSD-3-Clause @copyright Copyright (c) 2019, New York University", "class MomentumKinematicsOptimizer(object): def __init__(self): self.q_init = None self.dq_init = None", "endeffector is in # contact with the ground. if np.all(endeff_vel_ref[it][eff]", "self.planner_setting = planner_setting if endeff_traj_generator is None: endeff_traj_generator = EndeffectorTrajectoryGenerator()", "kinematic_state.lmom = self.lmom_kin[it] kinematic_state.amom = self.amom_kin[it] kinematic_state.robot_posture.base_position = q[:3] kinematic_state.robot_posture.base_orientation", "self.planner_setting.get( PlannerVectorParam_KinematicDefaultJointPositions) if len(plan_joint_init_pos) != self.robot.num_ctrl_joints: raise ValueError( 'Number of", "self.dq_init = None self.reg_orientation = 1e-2 self.reg_joint_position = 2. 
self.joint_des", "se3 from pinocchio.utils import zero from pymomentum import * from", "= np.matrix([.75, np.pi/2, -np.pi, np.pi/2, -np.pi, -np.pi/2, np.pi, -np.pi/2, np.pi]).T", "= se3.neutral(self.robot.model) plan_joint_init_pos = self.planner_setting.get( PlannerVectorParam_KinematicDefaultJointPositions) if len(plan_joint_init_pos) != self.robot.num_ctrl_joints:", "= planner_setting.get(PlannerIntParam_NumTimesteps) self.max_iterations = max_iterations self.eps = eps self.robot =", "2]), self.min_bound) return z_max, z_min def __call__(self, mom_kin_optimizer): ''' Computes", "def __init__(self, position, start_time, end_time): self.pos = position self.init_time =", "internal array. self.com_kin[it] = self.inv_kin.robot.com(q).T self.lmom_kin[it] = hg.linear.T self.amom_kin[it] =", "float(q[5])) amom_ref = 1e-1 * se3.log((quad_goal * quad_q.inverse()).matrix()) res =", "for initial setup.') print(\"initial configuration: \\n\", q) self.q_init = q.copy()", "mom_kin_optimizer.eff_names) # Generate minimum jerk trajectories eff_traj_poly = generate_eff_traj(contacts, self.z_offset)", "num_eff, 3)) endeff_vel_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_contact = np.zeros((num_time_steps,", "# q_via = np.matrix([.75, np.pi/2, -np.pi, np.pi/2, -np.pi, -np.pi/2, np.pi,", "dt) for i in range(3)] endeff_vel_ref[it][eff] = [eff_traj_poly[name][i].deval(it * dt)", "on the ground. t = [cnt[i].start_time(), cnt[i].end_time()] for idx in", "to always start with flat # oriented base. 
quad_q =", "= contacts.keys() eff_traj_poly = {} for eff in effs: cnt", "iterations'.format(iters + 1)) break if iters == self.max_iterations - 1:", "a contact following, add the transition between # the two", "self.amom_kin = np.zeros((self.num_time_steps, 3)) self.q_kin = np.zeros((self.num_time_steps, self.robot.model.nq)) self.dq_kin =", "* from momentumopt.quadruped.quadruped_wrapper import QuadrupedWrapper from momentumopt.kinoptpy.min_jerk_traj import * from", ",0.1, -0.2 ,-0.1, 0.2 ,-0.1, 0.2] # q_via = np.matrix([.75,", "{ 'trajectory': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity': np.zeros((self.num_time_steps, 3 *", "* se3.log((quad_goal * quad_q.inverse()).matrix()) res = self.inv_kin.compute(q, dq, com_ref, lmom_ref,", "i==0: t = [0, q_via[0,0]/self.dt] poly = poly_points(t, self.q_init[j], q_via[i,j+1])", "dq) self.fill_kinematic_result(it, q, dq) dq = self.inv_kin.compute( q, dq, self.com_dyn[it],", "self.eff_names) self.motion_eff = { 'trajectory': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity':", "New York University and Max Planck Gesellschaft. @date 2019-10-08 '''", "contacts.keys() eff_traj_poly = {} for eff in effs: cnt =", "self.num_time_steps joint_traj_gen.q_init = self.q_init[7:] self.joint_des = np.zeros((len(self.q_init[7:]),self.num_time_steps), float) if self.q_via", "in self.robot.effs] self.inv_kin = PointContactInverseKinematics(self.robot.model, self.eff_names) self.motion_eff = { 'trajectory':", "def joint_traj(self, q_via): self.poly_traj = [] for i in range(len(self.q_init)):", "__call__(self, mom_kin_optimizer): ''' Computes the endeffector positions and velocities. Returns", "joint_traj_gen.q_init = self.q_init[7:] self.joint_des = np.zeros((len(self.q_init[7:]),self.num_time_steps), float) if self.q_via is", "the endeffector positions and velocities. 
Returns endeff_pos_ref, endeff_vel_ref [0]: endeff_pos_ref:", "= eps self.robot = RobotWrapper() self.reset() # Holds dynamics and", "for eff in self.robot.effs] self.inv_kin = PointContactInverseKinematics(self.robot.model, self.eff_names) self.motion_eff =", "if idx == 2: via = z_offset + cnt[i].position()[idx] poly", "= z_offset + cnt[i].position()[idx] poly = poly_points(t, cnt[i].position()[idx], cnt[i+1].position()[idx], via)", "= np.matrix(np.zeros(self.robot.robot.nv)).T com_ref = init_state.com lmom_ref = np.zeros(3) amom_ref =", "= np.append(self.poly_traj, [PolynominalList()]) for j in range(len(self.q_init)): for i in", "* dt) for i in range(3)] # HACK: If the", "= quad_goal.coeffs() for iters in range(self.max_iterations): # Adding small P", "print(\"initial configuration: \\n\", q) self.q_init = q.copy() self.dq_init = dq.copy()", "end_time): self.pos = position self.init_time = start_time self.final_time = end_time", "q_max = np.matrix([1.35, .7*np.pi/2, -.7*np.pi, .7*np.pi/2, -.7*np.pi, -.7*np.pi/2, .7*np.pi, -.7*np.pi/2,", "self.endeff_contact[it], joint_regularization_ref) # Integrate to the next state. q =", "endeff_contact = np.ones(init_state.effNum()) quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) q[3:7] =", "joint_traj_gen = JointTrajectoryGenerator() joint_traj_gen.num_time_steps = self.num_time_steps joint_traj_gen.q_init = self.q_init[7:] self.joint_des", "= q.T self.dq_kin[it] = dq.T # The endeffector informations as", "com_ref = init_state.com lmom_ref = np.zeros(3) amom_ref = np.zeros(3) endeff_pos_ref", "= dq.T # The endeffector informations as well. self.motion_eff['trajectory'][it] =", "# The endeffector informations as well. 
self.motion_eff['trajectory'][it] = framesPos(self.inv_kin.endeff_ids) self.motion_eff['velocity'][it]", "dq = np.matrix(np.zeros(self.robot.robot.nv)).T com_ref = init_state.com lmom_ref = np.zeros(3) amom_ref", "self.endeff_traj_generator(self) def fill_kinematic_result(self, it, q, dq): def framesPos(frames): return np.vstack([data.oMf[idx].translation", "self.init_time = start_time self.final_time = end_time def position(self): return self.pos", "kinematics over the full trajectory. self.inv_kin.is_init_time = 0 q, dq", "* quad_q.inverse()).matrix()).T + self.amom_dyn[it]).reshape(-1) joint_regularization_ref = self.reg_joint_position * (np.matrix(self.joint_des[:,it]).T -", "it, q, dq): def framesPos(frames): return np.vstack([data.oMf[idx].translation for idx in", "flat # oriented base. quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5]))", "endeffector position and velocity trajectories. endeff_pos_ref = np.zeros((num_time_steps, num_eff, 3))", "Create array with centroidal and endeffector informations. self.fill_data_from_dynamics() self.fill_endeffector_trajectory() #", "se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref = 1e-1 * se3.log((quad_goal *", "None # Create array with centroidal and endeffector informations. 
self.fill_data_from_dynamics()", ".7*np.pi/2, -.7*np.pi, .7*np.pi/2, -.7*np.pi, -.7*np.pi/2, .7*np.pi, -.7*np.pi/2, .7*np.pi]).T # q_via0", "__init__(self): self.z_offset = 0.1 def get_z_bound(self, mom_kin_optimizer): z_max = min(max(mom_kin_optimizer.com_dyn[:,", "return self.init_time def end_time(self): return self.final_time def get_contact_plan(contact_states, effs): contacts", "self.robot.joints_list[-1]) for eff in self.robot.effs] self.inv_kin = PointContactInverseKinematics(self.robot.model, self.eff_names) self.motion_eff", "self.fill_data_from_dynamics() self.fill_endeffector_trajectory() # Run the optimization for the initial configuration", "= np.zeros((len(self.q_init[7:]),self.num_time_steps), float) if self.q_via is None: for i in", "The endeffector informations as well. self.motion_eff['trajectory'][it] = framesPos(self.inv_kin.endeff_ids) self.motion_eff['velocity'][it] =", "q_jump)) # self.q_via = np.vstack((q_via0, q_max.T)) joint_traj_gen = JointTrajectoryGenerator() joint_traj_gen.num_time_steps", "q_max.T)) joint_traj_gen = JointTrajectoryGenerator() joint_traj_gen.num_time_steps = self.num_time_steps joint_traj_gen.q_init = self.q_init[7:]", "enumerate(effs): num_contacts = len(contact_states(i)) contacts[eff] = [] for j in", "poly = poly_points(t, cnt[i].position()[idx], cnt[i+1].position()[idx], via) poly_traj[idx].append(t, poly) eff_traj_poly[eff] =", "self.pos = position self.init_time = start_time self.final_time = end_time def", "z_min = max(min(mom_kin_optimizer.com_dyn[:, 2]), self.min_bound) return z_max, z_min def __call__(self,", "[] for j in range(num_contacts): contact_ = contact_states(i)[j] start_time =", "kinematic_state.com = self.com_kin[it] kinematic_state.lmom = self.lmom_kin[it] kinematic_state.amom = self.amom_kin[it] kinematic_state.robot_posture.base_position", "get_contact_plan(contact_states, effs): contacts = {} for i, eff in enumerate(effs):", "from momentumopt.kinoptpy.min_jerk_traj import * from pymomentum 
import \\ PlannerVectorParam_KinematicDefaultJointPositions, \\", "= self.dynamic_sequence.dynamics_states[it].com self.lmom_dyn[it] = self.dynamic_sequence.dynamics_states[it].lmom self.amom_dyn[it] = self.dynamic_sequence.dynamics_states[it].amom def fill_endeffector_trajectory(self):", "= self.q_init[7 : ].T else: joint_traj_gen.joint_traj(self.q_via) for it in range(self.num_time_steps):", "* (self.q_init[7 : ] - q[7 : ]) # Fill", "np.zeros((1,len(self.q_init)),float) for j in range(len(self.q_init)): q[0,j] = self.poly_traj[j].eval(t) return np.matrix(q)", "'velocity': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'trajectory_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)),", "''' @file momentum_kinematics_optimizer.py @package momentumopt @author <NAME> (<EMAIL>) @license License", "dq.T # The endeffector informations as well. self.motion_eff['trajectory'][it] = framesPos(self.inv_kin.endeff_ids)", "poly) def eval_traj(self,t): q = np.zeros((1,len(self.q_init)),float) for j in range(len(self.q_init)):", "np.zeros((self.num_time_steps, 3)) self.lmom_kin = np.zeros((self.num_time_steps, 3)) self.amom_kin = np.zeros((self.num_time_steps, 3))", "self.motion_eff = { 'trajectory': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity': np.zeros((self.num_time_steps,", "endeffector informations. self.fill_data_from_dynamics() self.fill_endeffector_trajectory() # Run the optimization for the", "+ cnt[i].position()[idx] poly = poly_points(t, cnt[i].position()[idx], cnt[i+1].position()[idx], via) poly_traj[idx].append(t, poly)", "eff_traj_poly = generate_eff_traj(contacts, self.z_offset) # Compute the endeffector position and", "{} for eff in effs: cnt = contacts[eff] num_contacts =", "self.inv_kin.robot.com(q).T self.lmom_kin[it] = hg.linear.T self.amom_kin[it] = hg.angular.T self.q_kin[it] = q.T", "# Create array with centroidal and endeffector informations. 
self.fill_data_from_dynamics() self.fill_endeffector_trajectory()", "framesPos(self.inv_kin.endeff_ids) self.motion_eff['velocity'][it] = self.inv_kin.J[6:(self.inv_kin.ne + 2) * 3].dot(dq).T self.motion_eff['trajectory_wrt_base'][it] =", "self.init_state = init_state self.contact_sequence = contact_sequence self.dynamic_sequence = dynamic_sequence self.q_via", "range(self.num_time_steps): self.joint_des[:,it] = joint_traj_gen.eval_traj(it) # Compute inverse kinematics over the", "TODO: this is for jump, should go to config file", "float(q[3]), float(q[4]), float(q[5])) amom_ref = 1e-1 * se3.log((quad_goal * quad_q.inverse()).matrix())", "in range (len(q_via[:,0])+1): if i==0: t = [0, q_via[0,0]/self.dt] poly", "in range(self.num_time_steps): quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) quad_q = se3.Quaternion(float(q[6]),", "0.1 def get_z_bound(self, mom_kin_optimizer): z_max = min(max(mom_kin_optimizer.com_dyn[:, 2]), self.max_bound) z_min", "self.q_init = None self.poly_traj = None def joint_traj(self, q_via): self.poly_traj", "dynamics and kinematics results self.com_dyn = np.zeros((self.num_time_steps, 3)) self.lmom_dyn =", "If the velocity is zero, assume the endeffector is in", "for j in range(len(self.q_init)): for i in range (len(q_via[:,0])+1): if", "self.inv_kin.get_world_oriented_frame_jacobian(q, idx).dot(dq)[:3] for idx in frames ]).reshape(-1) data = self.inv_kin.robot.data", "]).reshape(-1) data = self.inv_kin.robot.data hg = self.inv_kin.robot.centroidalMomentum(q, dq) # Storing", "self.q_via = np.vstack((q_via0, q_max.T)) joint_traj_gen = JointTrajectoryGenerator() joint_traj_gen.num_time_steps = self.num_time_steps", "q[7:] kinematic_state.robot_velocity.base_linear_velocity = dq[:3] kinematic_state.robot_velocity.base_angular_velocity = dq[3:6] kinematic_state.robot_velocity.joint_velocities = dq[6:]", "quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref = (self.reg_orientation *", 
"cnt = contacts[eff] num_contacts = len(cnt) poly_traj = [ PolynominalList(),", "quad_q.inverse()).matrix()).T + self.amom_dyn[it]).reshape(-1) joint_regularization_ref = self.reg_joint_position * (np.matrix(self.joint_des[:,it]).T - q[7", "kinematics results for it. self.inv_kin.forward_robot(q, dq) self.fill_kinematic_result(it, q, dq) dq", "returns end eff trajectories return eff_traj_poly class EndeffectorTrajectoryGenerator(object): def __init__(self):", "velocities. Returns endeff_pos_ref, endeff_vel_ref [0]: endeff_pos_ref: np.array, shape=[num_time_steps, num_eff, 3={x,", "0.2] # q_via = np.matrix([.75, np.pi/2, -np.pi, np.pi/2, -np.pi, -np.pi/2,", "self.q_via = None # Create array with centroidal and endeffector", "iters in range(self.max_iterations): # Adding small P controller for the", "(len(q_via[:,0])+1): if i==0: t = [0, q_via[0,0]/self.dt] poly = poly_points(t,", "self.robot.floor_height + 0.32 dq = np.matrix(np.zeros(self.robot.robot.nv)).T com_ref = init_state.com lmom_ref", "initial setup.') print(\"initial configuration: \\n\", q) self.q_init = q.copy() self.dq_init", "0.]).T)) quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref = (self.reg_orientation", "go to config file # q_jump = [1., 0.1, -0.2", "np.vstack((q_via.T, q_jump)) # self.q_via = np.vstack((q_via0, q_max.T)) joint_traj_gen = JointTrajectoryGenerator()", "self.poly_traj[j].eval(t) return np.matrix(q) class MomentumKinematicsOptimizer(object): def __init__(self): self.q_init = None", "eps self.robot = RobotWrapper() self.reset() # Holds dynamics and kinematics", "def get_z_bound(self, mom_kin_optimizer): z_max = min(max(mom_kin_optimizer.com_dyn[:, 2]), self.max_bound) z_min =", "# q_jump = [1., 0.1, -0.2 ,0.1, -0.2 ,-0.1, 0.2", ",-0.1, 0.2 ,-0.1, 0.2] # q_via = np.matrix([.75, np.pi/2, -np.pi,", "reset(self): self.kinematics_sequence = KinematicsSequence() self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps), 
self.planner_setting.get(PlannerIntParam_NumDofs)) def initialize(self, planner_setting, max_iterations=50,", "= np.zeros((1,len(self.q_init)),float) for j in range(len(self.q_init)): q[0,j] = self.poly_traj[j].eval(t) return", "with centroidal and endeffector informations. self.fill_data_from_dynamics() self.fill_endeffector_trajectory() # Run the", "York University and Max Planck Gesellschaft. @date 2019-10-08 ''' import", "q[0,j] = self.poly_traj[j].eval(t) return np.matrix(q) class MomentumKinematicsOptimizer(object): def __init__(self): self.q_init", "self.robot.num_ctrl_joints: raise ValueError( 'Number of joints in config file not", "= np.zeros((self.num_time_steps, 3)) self.amom_dyn = np.zeros((self.num_time_steps, 3)) self.com_kin = np.zeros((self.num_time_steps,", "idx in range(3): via = None if idx == 2:", "self.joint_des[:,i] = self.q_init[7 : ].T else: joint_traj_gen.joint_traj(self.q_via) for it in", "kinematic_state.robot_velocity.base_angular_velocity = dq[3:6] kinematic_state.robot_velocity.joint_velocities = dq[6:] def optimize_initial_position(self, init_state): #", "np.matrix(q) class MomentumKinematicsOptimizer(object): def __init__(self): self.q_init = None self.dq_init =", "range(num_time_steps): for eff, name in enumerate(mom_kin_optimizer.eff_names): endeff_pos_ref[it][eff] = [eff_traj_poly[name][i].eval(it *", "contact_states(i)[j] start_time = contact_.start_time end_time = contact_.end_time position = contact_.position", "Storing on the internal array. 
self.com_kin[it] = self.inv_kin.robot.com(q).T self.lmom_kin[it] =", "dq = self.q_init.copy(), self.dq_init.copy() for it in range(self.num_time_steps): quad_goal =", "poly) elif(i==len(q_via[:,0])): t = [q_via[i-1,0]/self.dt, self.num_time_steps] poly = poly_points(t, q_via[i-1,j+1],", "self.dq_kin = np.zeros((self.num_time_steps, self.robot.model.nv)) self.hip_names = ['{}_HFE'.format(eff) for eff in", "* from pymomentum import \\ PlannerVectorParam_KinematicDefaultJointPositions, \\ PlannerIntParam_NumTimesteps, \\ PlannerDoubleParam_TimeStep", "= None self.reg_orientation = 1e-2 self.reg_joint_position = 2. self.joint_des =", "%d joints but robot expects %d joints.' % ( len(plan_joint_init_pos),", "ValueError( 'Number of joints in config file not same as", "= se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref = 1e-1 * se3.log((quad_goal", "@author <NAME> (<EMAIL>) @license License BSD-3-Clause @copyright Copyright (c) 2019,", "kinematic_state.amom = self.amom_kin[it] kinematic_state.robot_posture.base_position = q[:3] kinematic_state.robot_posture.base_orientation = q[3:7] kinematic_state.robot_posture.joint_positions", ".7*np.pi/2, -.7*np.pi, -.7*np.pi/2, .7*np.pi, -.7*np.pi/2, .7*np.pi]).T # q_via0 = np.vstack((q_via.T,", "Fill the kinematics results for it. 
self.inv_kin.forward_robot(q, dq) self.fill_kinematic_result(it, q,", "quad_goal.coeffs() for iters in range(self.max_iterations): # Adding small P controller", "[1., 0.1, -0.2 ,0.1, -0.2 ,-0.1, 0.2 ,-0.1, 0.2] #", "If there is a contact following, add the transition between", "joint_regularization_ref = self.reg_joint_position * (self.q_init[7 : ] - q[7 :", "t = [cnt[i].start_time(), cnt[i].end_time()] for idx in range(3): poly_traj[idx].append(t, constant_poly(cnt[i].position()[idx]))", "pymomentum import * from momentumopt.quadruped.quadruped_wrapper import QuadrupedWrapper from momentumopt.kinoptpy.min_jerk_traj import", "= se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) q[3:7] = quad_goal.coeffs() for iters in", "= KinematicsSequence() self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps), self.planner_setting.get(PlannerIntParam_NumDofs)) def initialize(self, planner_setting, max_iterations=50, eps=0.001, endeff_traj_generator=None,", "None self.dq_init = None self.reg_orientation = 1e-2 self.reg_joint_position = 2.", "['{}_HFE'.format(eff) for eff in self.robot.effs] self.hip_ids = [self.robot.model.getFrameId(name) for name", "for the initial configuration only once. if self.q_init is None:", "planner_setting.get(PlannerDoubleParam_TimeStep) self.num_time_steps = planner_setting.get(PlannerIntParam_NumTimesteps) self.max_iterations = max_iterations self.eps = eps", "\\ self.endeff_traj_generator(self) def fill_kinematic_result(self, it, q, dq): def framesPos(frames): return", "the transition between # the two contact points. 
if i", "RobotWrapper import pinocchio as se3 from pinocchio.utils import zero from", "import os import numpy as np from momentumopt.kinoptpy.qp import QpSolver", "def end_time(self): return self.final_time def get_contact_plan(contact_states, effs): contacts = {}", "=.01 self.num_time_steps = None self.q_init = None self.poly_traj = None", "return eff_traj_poly class EndeffectorTrajectoryGenerator(object): def __init__(self): self.z_offset = 0.1 def", "endeff_traj_generator is None: endeff_traj_generator = EndeffectorTrajectoryGenerator() self.endeff_traj_generator = endeff_traj_generator self.dt", "in range(len(self.q_init)): self.poly_traj = np.append(self.poly_traj, [PolynominalList()]) for j in range(len(self.q_init)):", "self.num_time_steps = planner_setting.get(PlannerIntParam_NumTimesteps) self.max_iterations = max_iterations self.eps = eps self.robot", "* (np.matrix(self.joint_des[:,it]).T - q[7 : ]) # joint_regularization_ref = self.reg_joint_position", "= self.num_time_steps joint_traj_gen.q_init = self.q_init[7:] self.joint_des = np.zeros((len(self.q_init[7:]),self.num_time_steps), float) if", "+ self.amom_dyn[it]).reshape(-1) joint_regularization_ref = self.reg_joint_position * (np.matrix(self.joint_des[:,it]).T - q[7 :", "]) # joint_regularization_ref = self.reg_joint_position * (self.q_init[7 : ] -", "controller for the base orientation to always start with flat", "joint_traj_gen.eval_traj(it) # Compute inverse kinematics over the full trajectory. 
self.inv_kin.is_init_time", "mom_kin_optimizer.dt num_eff = len(mom_kin_optimizer.eff_names) num_time_steps = mom_kin_optimizer.num_time_steps contacts = get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states,", "self.num_time_steps] poly = poly_points(t, q_via[i-1,j+1], self.q_init[j]) self.poly_traj[j].append(t, poly) else: t", "endeff_pos_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_vel_ref = np.zeros((num_time_steps, num_eff, 3))", "# HACK: If the velocity is zero, assume the endeffector", "PointContactInverseKinematics from pinocchio import RobotWrapper import pinocchio as se3 from", "informations as well. self.motion_eff['trajectory'][it] = framesPos(self.inv_kin.endeff_ids) self.motion_eff['velocity'][it] = self.inv_kin.J[6:(self.inv_kin.ne +", "[q_via[i-1,0]/self.dt, self.num_time_steps] poly = poly_points(t, q_via[i-1,j+1], self.q_init[j]) self.poly_traj[j].append(t, poly) else:", "np.array, shape=[num_time_steps, num_eff, 3={x, y, z}] ''' dt = mom_kin_optimizer.dt", "[eff_traj_poly[name][i].eval(it * dt) for i in range(3)] endeff_vel_ref[it][eff] = [eff_traj_poly[name][i].deval(it", "0.2 ,-0.1, 0.2] # q_via = np.matrix([.75, np.pi/2, -np.pi, np.pi/2,", "].T else: joint_traj_gen.joint_traj(self.q_via) for it in range(self.num_time_steps): self.joint_des[:,it] = joint_traj_gen.eval_traj(it)", "with the ground. 
if np.all(endeff_vel_ref[it][eff] == 0.): endeff_contact[it][eff] = 1.", "= np.vstack((q_via.T, q_jump)) # self.q_via = np.vstack((q_via0, q_max.T)) joint_traj_gen =", "for it in range(self.num_time_steps): quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) quad_q", "0 q, dq = self.q_init.copy(), self.dq_init.copy() for it in range(self.num_time_steps):", "= \\ self.endeff_traj_generator(self) def fill_kinematic_result(self, it, q, dq): def framesPos(frames):", "elif(i==len(q_via[:,0])): t = [q_via[i-1,0]/self.dt, self.num_time_steps] poly = poly_points(t, q_via[i-1,j+1], self.q_init[j])", "0.]).T)) q[3:7] = quad_goal.coeffs() for iters in range(self.max_iterations): # Adding", "num_contacts = len(cnt) poly_traj = [ PolynominalList(), PolynominalList(), PolynominalList() ]", "= self.planner_setting.get( PlannerVectorParam_KinematicDefaultJointPositions) if len(plan_joint_init_pos) != self.robot.num_ctrl_joints: raise ValueError( 'Number", "q_via[i,j+1]) self.poly_traj[j].append(t, poly) def eval_traj(self,t): q = np.zeros((1,len(self.q_init)),float) for j", "(self.num_time_steps): self.joint_des[:,i] = self.q_init[7 : ].T else: joint_traj_gen.joint_traj(self.q_via) for it", "__init__(self): self.dt =.01 self.num_time_steps = None self.q_init = None self.poly_traj", "self.max_iterations = max_iterations self.eps = eps self.robot = RobotWrapper() self.reset()", "2019, New York University and Max Planck Gesellschaft. 
@date 2019-10-08", "self.q_init is None: self.optimize_initial_position(init_state) # Get the desired joint trajectory", "from pinocchio import RobotWrapper import pinocchio as se3 from pinocchio.utils", "def initialize(self, planner_setting, max_iterations=50, eps=0.001, endeff_traj_generator=None, RobotWrapper=QuadrupedWrapper): self.planner_setting = planner_setting", "def eval_traj(self,t): q = np.zeros((1,len(self.q_init)),float) for j in range(len(self.q_init)): q[0,j]", "amom_ref = (self.reg_orientation * se3.log((quad_goal * quad_q.inverse()).matrix()).T + self.amom_dyn[it]).reshape(-1) joint_regularization_ref", "max_iterations=50, eps=0.001, endeff_traj_generator=None, RobotWrapper=QuadrupedWrapper): self.planner_setting = planner_setting if endeff_traj_generator is", "two contact points. if i < num_contacts - 1: t", "with flat # oriented base. quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]),", "joint_traj_gen.joint_traj(self.q_via) for it in range(self.num_time_steps): self.joint_des[:,it] = joint_traj_gen.eval_traj(it) # Compute", "def start_time(self): return self.init_time def end_time(self): return self.final_time def get_contact_plan(contact_states,", "2) * 3].dot(dq).T self.motion_eff['trajectory_wrt_base'][it] = \\ self.motion_eff['trajectory'][it] - framesPos(self.hip_ids) self.motion_eff['velocity_wrt_base'][it]", "array with centroidal and endeffector informations. 
self.fill_data_from_dynamics() self.fill_endeffector_trajectory() # Run", "= planner_setting if endeff_traj_generator is None: endeff_traj_generator = EndeffectorTrajectoryGenerator() self.endeff_traj_generator", ",-0.1, 0.2] # q_via = np.matrix([.75, np.pi/2, -np.pi, np.pi/2, -np.pi,", "contact_.end_time position = contact_.position contacts[eff].append(Contact(position, start_time, end_time)) return contacts def", "self.q_init[j]) self.poly_traj[j].append(t, poly) else: t = [q_via[i-1,0]/self.dt, q_via[i,0]/self.dt] poly =", "get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states, mom_kin_optimizer.eff_names) # Generate minimum jerk trajectories eff_traj_poly = generate_eff_traj(contacts,", "= dq[:3] kinematic_state.robot_velocity.base_angular_velocity = dq[3:6] kinematic_state.robot_velocity.joint_velocities = dq[6:] def optimize_initial_position(self,", "self.hip_names = ['{}_HFE'.format(eff) for eff in self.robot.effs] self.hip_ids = [self.robot.model.getFrameId(name)", "'velocity_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)) } def fill_data_from_dynamics(self): # The", "# Holds dynamics and kinematics results self.com_dyn = np.zeros((self.num_time_steps, 3))", "cnt[i].position()[idx], cnt[i+1].position()[idx], via) poly_traj[idx].append(t, poly) eff_traj_poly[eff] = poly_traj # returns", "eff_traj_poly[eff] = poly_traj # returns end eff trajectories return eff_traj_poly", "num_eff)) for it in range(num_time_steps): for eff, name in enumerate(mom_kin_optimizer.eff_names):", "= np.zeros(3) amom_ref = np.zeros(3) endeff_pos_ref = np.array([init_state.effPosition(i) for i", "KinematicsSequence() self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps), self.planner_setting.get(PlannerIntParam_NumDofs)) def initialize(self, planner_setting, max_iterations=50, eps=0.001, endeff_traj_generator=None, RobotWrapper=QuadrupedWrapper):", "in effs: cnt = contacts[eff] num_contacts = len(cnt) poly_traj =", "= 
np.zeros((num_time_steps, num_eff, 3)) endeff_vel_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_contact", "momentumopt.kinoptpy.inverse_kinematics import PointContactInverseKinematics from pinocchio import RobotWrapper import pinocchio as", "] for i in range(num_contacts): # Create a constant polynominal", "'trajectory_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne))", "is None: endeff_traj_generator = EndeffectorTrajectoryGenerator() self.endeff_traj_generator = endeff_traj_generator self.dt =", "eps=0.001, endeff_traj_generator=None, RobotWrapper=QuadrupedWrapper): self.planner_setting = planner_setting if endeff_traj_generator is None:", "same as required for robot\\n' + 'Got %d joints but", "i in range(len(self.q_init)): self.poly_traj = np.append(self.poly_traj, [PolynominalList()]) for j in", "self.q_init = None self.dq_init = None self.reg_orientation = 1e-2 self.reg_joint_position", "def reset(self): self.kinematics_sequence = KinematicsSequence() self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps), self.planner_setting.get(PlannerIntParam_NumDofs)) def initialize(self, planner_setting,", "= np.array([init_state.effPosition(i) for i in range(init_state.effNum())]) endeff_vel_ref = np.matrix(np.zeros((init_state.effNum(), 3)))", "np.matrix([1.35, .7*np.pi/2, -.7*np.pi, .7*np.pi/2, -.7*np.pi, -.7*np.pi/2, .7*np.pi, -.7*np.pi/2, .7*np.pi]).T #", "self.inv_kin.is_init_time = 0 q, dq = self.q_init.copy(), self.dq_init.copy() for it", "position self.init_time = start_time self.final_time = end_time def position(self): return", "initialize(self, planner_setting, max_iterations=50, eps=0.001, endeff_traj_generator=None, RobotWrapper=QuadrupedWrapper): self.planner_setting = planner_setting if", "len(plan_joint_init_pos) != self.robot.num_ctrl_joints: raise ValueError( 'Number of joints in config", "z_max, z_min def __call__(self, 
mom_kin_optimizer): ''' Computes the endeffector positions", "in enumerate(mom_kin_optimizer.eff_names): endeff_pos_ref[it][eff] = [eff_traj_poly[name][i].eval(it * dt) for i in", "= (self.reg_orientation * se3.log((quad_goal * quad_q.inverse()).matrix()).T + self.amom_dyn[it]).reshape(-1) joint_regularization_ref =", ": ] - q[7 : ]) # Fill the kinematics", "self.amom_dyn[it]).reshape(-1) joint_regularization_ref = self.reg_joint_position * (np.matrix(self.joint_des[:,it]).T - q[7 : ])", "self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps), self.planner_setting.get(PlannerIntParam_NumDofs)) def initialize(self, planner_setting, max_iterations=50, eps=0.001, endeff_traj_generator=None, RobotWrapper=QuadrupedWrapper): self.planner_setting", "max_iterations self.eps = eps self.robot = RobotWrapper() self.reset() # Holds", "range (len(q_via[:,0])+1): if i==0: t = [0, q_via[0,0]/self.dt] poly =", "optimize(self, init_state, contact_sequence, dynamic_sequence, plotting=False): self.init_state = init_state self.contact_sequence =", "it. self.inv_kin.forward_robot(q, dq) self.fill_kinematic_result(it, q, dq) dq = self.inv_kin.compute( q,", "self.motion_eff['trajectory'][it] - framesPos(self.hip_ids) self.motion_eff['velocity_wrt_base'][it] = \\ self.motion_eff['velocity'][it] - framesVel(self.hip_ids) #", "# Optimize the initial configuration q = se3.neutral(self.robot.model) plan_joint_init_pos =", "- 1: print('Failed to converge for initial setup.') print(\"initial configuration:", "for idx in range(3): poly_traj[idx].append(t, constant_poly(cnt[i].position()[idx])) # If there is", "self.motion_eff['velocity'][it] - framesVel(self.hip_ids) # Storing on the kinematic sequence. kinematic_state", "endeff_traj_generator self.dt = planner_setting.get(PlannerDoubleParam_TimeStep) self.num_time_steps = planner_setting.get(PlannerIntParam_NumTimesteps) self.max_iterations = max_iterations", "the endeffector position and velocity trajectories. 
endeff_pos_ref = np.zeros((num_time_steps, num_eff,", "pinocchio.utils import zero from pymomentum import * from momentumopt.quadruped.quadruped_wrapper import", "i in range(num_contacts): # Create a constant polynominal for endeffector", "self.robot = RobotWrapper() self.reset() # Holds dynamics and kinematics results", "self.motion_eff['trajectory_wrt_base'][it] = \\ self.motion_eff['trajectory'][it] - framesPos(self.hip_ids) self.motion_eff['velocity_wrt_base'][it] = \\ self.motion_eff['velocity'][it]", "momentumopt.kinoptpy.min_jerk_traj import * from pymomentum import \\ PlannerVectorParam_KinematicDefaultJointPositions, \\ PlannerIntParam_NumTimesteps,", "= None self.q_init = None self.poly_traj = None def joint_traj(self,", "idx in frames]).reshape(-1) def framesVel(frames): return np.vstack([ self.inv_kin.get_world_oriented_frame_jacobian(q, idx).dot(dq)[:3] for", "PolynominalList() ] for i in range(num_contacts): # Create a constant", "= { 'trajectory': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity': np.zeros((self.num_time_steps, 3", "hg = self.inv_kin.robot.centroidalMomentum(q, dq) # Storing on the internal array.", "= self.amom_kin[it] kinematic_state.robot_posture.base_position = q[:3] kinematic_state.robot_posture.base_orientation = q[3:7] kinematic_state.robot_posture.joint_positions =", "# Get the desired joint trajectory # print \"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints) #", "from pymomentum import \\ PlannerVectorParam_KinematicDefaultJointPositions, \\ PlannerIntParam_NumTimesteps, \\ PlannerDoubleParam_TimeStep class", "contact_sequence self.dynamic_sequence = dynamic_sequence self.q_via = None # Create array", "0.): endeff_contact[it][eff] = 1. else: endeff_contact[it][eff] = 0. 
return endeff_pos_ref,", "'trajectory': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)),", "''' dt = mom_kin_optimizer.dt num_eff = len(mom_kin_optimizer.eff_names) num_time_steps = mom_kin_optimizer.num_time_steps", "start with flat # oriented base. quad_q = se3.Quaternion(float(q[6]), float(q[3]),", "len(cnt) poly_traj = [ PolynominalList(), PolynominalList(), PolynominalList() ] for i", "via) poly_traj[idx].append(t, poly) eff_traj_poly[eff] = poly_traj # returns end eff", "self.amom_dyn = np.zeros((self.num_time_steps, 3)) self.com_kin = np.zeros((self.num_time_steps, 3)) self.lmom_kin =", "import * from momentumopt.quadruped.quadruped_wrapper import QuadrupedWrapper from momentumopt.kinoptpy.min_jerk_traj import *", "3)) self.lmom_kin = np.zeros((self.num_time_steps, 3)) self.amom_kin = np.zeros((self.num_time_steps, 3)) self.q_kin", "q, dq): def framesPos(frames): return np.vstack([data.oMf[idx].translation for idx in frames]).reshape(-1)", "start_time = contact_.start_time end_time = contact_.end_time position = contact_.position contacts[eff].append(Contact(position,", "mom_kin_optimizer): ''' Computes the endeffector positions and velocities. 
Returns endeff_pos_ref,", "@date 2019-10-08 ''' import os import numpy as np from", "float(q[5])) amom_ref = (self.reg_orientation * se3.log((quad_goal * quad_q.inverse()).matrix()).T + self.amom_dyn[it]).reshape(-1)", "shape=[num_time_steps, num_eff, 3={x, y, z}] [1]: endeff_vel_ref: np.array, shape=[num_time_steps, num_eff,", "dq.copy() def optimize(self, init_state, contact_sequence, dynamic_sequence, plotting=False): self.init_state = init_state", "centroidal information for it in range(self.num_time_steps): self.com_dyn[it] = self.dynamic_sequence.dynamics_states[it].com self.lmom_dyn[it]", "endeff_pos_ref = np.array([init_state.effPosition(i) for i in range(init_state.effNum())]) endeff_vel_ref = np.matrix(np.zeros((init_state.effNum(),", "q.copy() self.dq_init = dq.copy() def optimize(self, init_state, contact_sequence, dynamic_sequence, plotting=False):", "in range(3)] endeff_vel_ref[it][eff] = [eff_traj_poly[name][i].deval(it * dt) for i in", "jump, should go to config file # q_jump = [1.,", "informations. self.fill_data_from_dynamics() self.fill_endeffector_trajectory() # Run the optimization for the initial", "q = se3.integrate(self.robot.model, q, res) if np.linalg.norm(res) < 1e-3: print('Found", "contacts = get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states, mom_kin_optimizer.eff_names) # Generate minimum jerk trajectories eff_traj_poly", "3 * self.inv_kin.ne)), 'velocity': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'trajectory_wrt_base': np.zeros((self.num_time_steps,", "robot expects %d joints.' 
% ( len(plan_joint_init_pos), self.robot.num_ctrl_joints)) q[7:] =", "def fill_endeffector_trajectory(self): self.endeff_pos_ref, self.endeff_vel_ref, self.endeff_contact = \\ self.endeff_traj_generator(self) def fill_kinematic_result(self,", "0, 0.]).T)) q[3:7] = quad_goal.coeffs() for iters in range(self.max_iterations): #", "= mom_kin_optimizer.dt num_eff = len(mom_kin_optimizer.eff_names) num_time_steps = mom_kin_optimizer.num_time_steps contacts =", "return self.final_time def get_contact_plan(contact_states, effs): contacts = {} for i,", "ground. if np.all(endeff_vel_ref[it][eff] == 0.): endeff_contact[it][eff] = 1. else: endeff_contact[it][eff]", "the base orientation to always start with flat # oriented", "the kinematics results for it. self.inv_kin.forward_robot(q, dq) self.fill_kinematic_result(it, q, dq)", "# q_max = np.matrix([1.35, .7*np.pi/2, -.7*np.pi, .7*np.pi/2, -.7*np.pi, -.7*np.pi/2, .7*np.pi,", "the ground. t = [cnt[i].start_time(), cnt[i].end_time()] for idx in range(3):", "mom_kin_optimizer): z_max = min(max(mom_kin_optimizer.com_dyn[:, 2]), self.max_bound) z_min = max(min(mom_kin_optimizer.com_dyn[:, 2]),", "res = self.inv_kin.compute(q, dq, com_ref, lmom_ref, amom_ref, endeff_pos_ref, endeff_vel_ref, endeff_contact,", ".7*np.pi]).T # q_via0 = np.vstack((q_via.T, q_jump)) # self.q_via = np.vstack((q_via0,", "converge for initial setup.') print(\"initial configuration: \\n\", q) self.q_init =", "dq[3:6] kinematic_state.robot_velocity.joint_velocities = dq[6:] def optimize_initial_position(self, init_state): # Optimize the", "3)) self.com_kin = np.zeros((self.num_time_steps, 3)) self.lmom_kin = np.zeros((self.num_time_steps, 3)) self.amom_kin", "(np.matrix(self.joint_des[:,it]).T - q[7 : ]) # joint_regularization_ref = self.reg_joint_position *", "= 2. 
self.joint_des = None def reset(self): self.kinematics_sequence = KinematicsSequence()", "- q[7 : ]) # Fill the kinematics results for", "name in self.hip_names] self.eff_names = ['{}_{}'.format(eff, self.robot.joints_list[-1]) for eff in", "momentumopt.quadruped.quadruped_wrapper import QuadrupedWrapper from momentumopt.kinoptpy.min_jerk_traj import * from pymomentum import", "end_time = contact_.end_time position = contact_.position contacts[eff].append(Contact(position, start_time, end_time)) return", "= contact_.start_time end_time = contact_.end_time position = contact_.position contacts[eff].append(Contact(position, start_time,", "is a contact following, add the transition between # the", "self.com_dyn[it], self.lmom_dyn[it], amom_ref, self.endeff_pos_ref[it], self.endeff_vel_ref[it], self.endeff_contact[it], joint_regularization_ref) # Integrate to", "i in range (len(q_via[:,0])+1): if i==0: t = [0, q_via[0,0]/self.dt]", "(self.reg_orientation * se3.log((quad_goal * quad_q.inverse()).matrix()).T + self.amom_dyn[it]).reshape(-1) joint_regularization_ref = self.reg_joint_position", "positions and velocities. 
Returns endeff_pos_ref, endeff_vel_ref [0]: endeff_pos_ref: np.array, shape=[num_time_steps,", "com_ref, lmom_ref, amom_ref, endeff_pos_ref, endeff_vel_ref, endeff_contact, None) q = se3.integrate(self.robot.model,", "num_contacts = len(contact_states(i)) contacts[eff] = [] for j in range(num_contacts):", "-0.2 ,-0.1, 0.2 ,-0.1, 0.2] # q_via = np.matrix([.75, np.pi/2,", "else: joint_traj_gen.joint_traj(self.q_via) for it in range(self.num_time_steps): self.joint_des[:,it] = joint_traj_gen.eval_traj(it) #", "Returns endeff_pos_ref, endeff_vel_ref [0]: endeff_pos_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y,", "self.inv_kin.ne)) } def fill_data_from_dynamics(self): # The centroidal information for it", "endeff_pos_ref, endeff_vel_ref, endeff_contact class JointTrajectoryGenerator(object): def __init__(self): self.dt =.01 self.num_time_steps", "q) self.q_init = q.copy() self.dq_init = dq.copy() def optimize(self, init_state,", "= None if idx == 2: via = z_offset +", "q = se3.neutral(self.robot.model) plan_joint_init_pos = self.planner_setting.get( PlannerVectorParam_KinematicDefaultJointPositions) if len(plan_joint_init_pos) !=", "in range(init_state.effNum())]) endeff_vel_ref = np.matrix(np.zeros((init_state.effNum(), 3))) endeff_contact = np.ones(init_state.effNum()) quad_goal", "= [self.robot.model.getFrameId(name) for name in self.hip_names] self.eff_names = ['{}_{}'.format(eff, self.robot.joints_list[-1])", "def position(self): return self.pos def start_time(self): return self.init_time def end_time(self):", "= 0.1 def get_z_bound(self, mom_kin_optimizer): z_max = min(max(mom_kin_optimizer.com_dyn[:, 2]), self.max_bound)", "self.min_bound) return z_max, z_min def __call__(self, mom_kin_optimizer): ''' Computes the", "__init__(self): self.q_init = None self.dq_init = None self.reg_orientation = 1e-2", "velocity trajectories. 
endeff_pos_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_vel_ref = np.zeros((num_time_steps,", "- framesVel(self.hip_ids) # Storing on the kinematic sequence. kinematic_state =", "for i in range(num_contacts): # Create a constant polynominal for", "self.endeff_pos_ref, self.endeff_vel_ref, self.endeff_contact = \\ self.endeff_traj_generator(self) def fill_kinematic_result(self, it, q,", "kinematic_state.robot_posture.joint_positions = q[7:] kinematic_state.robot_velocity.base_linear_velocity = dq[:3] kinematic_state.robot_velocity.base_angular_velocity = dq[3:6] kinematic_state.robot_velocity.joint_velocities", "!= self.robot.num_ctrl_joints: raise ValueError( 'Number of joints in config file", "get_z_bound(self, mom_kin_optimizer): z_max = min(max(mom_kin_optimizer.com_dyn[:, 2]), self.max_bound) z_min = max(min(mom_kin_optimizer.com_dyn[:,", "3)) endeff_contact = np.zeros((num_time_steps, num_eff)) for it in range(num_time_steps): for", "= [] for i in range(len(self.q_init)): self.poly_traj = np.append(self.poly_traj, [PolynominalList()])", "poly = poly_points(t, self.q_init[j], q_via[i,j+1]) self.poly_traj[j].append(t, poly) elif(i==len(q_via[:,0])): t =", "= 0 q, dq = self.q_init.copy(), self.dq_init.copy() for it in", "float(q[3]), float(q[4]), float(q[5])) amom_ref = (self.reg_orientation * se3.log((quad_goal * quad_q.inverse()).matrix()).T", "num_eff, 3={x, y, z}] [1]: endeff_vel_ref: np.array, shape=[num_time_steps, num_eff, 3={x,", ".7*np.pi, -.7*np.pi/2, .7*np.pi]).T # q_via0 = np.vstack((q_via.T, q_jump)) # self.q_via", "start_time self.final_time = end_time def position(self): return self.pos def start_time(self):", "is for jump, should go to config file # q_jump", ": ]) # Fill the kinematics results for it. 
self.inv_kin.forward_robot(q,", "import zero from pymomentum import * from momentumopt.quadruped.quadruped_wrapper import QuadrupedWrapper", "print \"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints) # print \"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints) # TODO: this is for", "in range(3): poly_traj[idx].append(t, constant_poly(cnt[i].position()[idx])) # If there is a contact", "[self.robot.model.getFrameId(name) for name in self.hip_names] self.eff_names = ['{}_{}'.format(eff, self.robot.joints_list[-1]) for", "break if iters == self.max_iterations - 1: print('Failed to converge", "and Max Planck Gesellschaft. @date 2019-10-08 ''' import os import", "endeff_pos_ref, endeff_vel_ref [0]: endeff_pos_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y, z}]", "None: self.optimize_initial_position(init_state) # Get the desired joint trajectory # print", "self.inv_kin.robot.centroidalMomentum(q, dq) # Storing on the internal array. self.com_kin[it] =", "self.dq_init.copy() for it in range(self.num_time_steps): quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T))", "the velocity is zero, assume the endeffector is in #", "self.amom_kin[it] = hg.angular.T self.q_kin[it] = q.T self.dq_kin[it] = dq.T #", "# returns end eff trajectories return eff_traj_poly class EndeffectorTrajectoryGenerator(object): def", "se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) quad_q = se3.Quaternion(float(q[6]), float(q[3]), float(q[4]), float(q[5])) amom_ref", "self.kinematics_sequence = KinematicsSequence() self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps), self.planner_setting.get(PlannerIntParam_NumDofs)) def initialize(self, planner_setting, max_iterations=50, eps=0.001,", "add the transition between # the two contact points. if", "= hg.linear.T self.amom_kin[it] = hg.angular.T self.q_kin[it] = q.T self.dq_kin[it] =", "the two contact points. 
if i < num_contacts - 1:", "self.reg_orientation = 1e-2 self.reg_joint_position = 2. self.joint_des = None def", "np.zeros((num_time_steps, num_eff, 3)) endeff_vel_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_contact =", "= np.matrix(plan_joint_init_pos).T q[2] = self.robot.floor_height + 0.32 dq = np.matrix(np.zeros(self.robot.robot.nv)).T", "= get_contact_plan(mom_kin_optimizer.contact_sequence.contact_states, mom_kin_optimizer.eff_names) # Generate minimum jerk trajectories eff_traj_poly =", "for endeffector on the ground. t = [cnt[i].start_time(), cnt[i].end_time()] for", "there is a contact following, add the transition between #", "None self.q_init = None self.poly_traj = None def joint_traj(self, q_via):", "dq[:3] kinematic_state.robot_velocity.base_angular_velocity = dq[3:6] kinematic_state.robot_velocity.joint_velocities = dq[6:] def optimize_initial_position(self, init_state):", "q_via[i-1,j+1], self.q_init[j]) self.poly_traj[j].append(t, poly) else: t = [q_via[i-1,0]/self.dt, q_via[i,0]/self.dt] poly", "Create a constant polynominal for endeffector on the ground. 
t", "i in range(init_state.effNum())]) endeff_vel_ref = np.matrix(np.zeros((init_state.effNum(), 3))) endeff_contact = np.ones(init_state.effNum())", "q[7 : ]) # Fill the kinematics results for it.", "np.ones(init_state.effNum()) quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0, 0.]).T)) q[3:7] = quad_goal.coeffs() for", "= dq[6:] def optimize_initial_position(self, init_state): # Optimize the initial configuration", "generate_eff_traj(contacts, self.z_offset) # Compute the endeffector position and velocity trajectories.", "poly_traj # returns end eff trajectories return eff_traj_poly class EndeffectorTrajectoryGenerator(object):", "for i in range(3)] endeff_vel_ref[it][eff] = [eff_traj_poly[name][i].deval(it * dt) for", "q.T self.dq_kin[it] = dq.T # The endeffector informations as well.", "@package momentumopt @author <NAME> (<EMAIL>) @license License BSD-3-Clause @copyright Copyright", "# contact with the ground. if np.all(endeff_vel_ref[it][eff] == 0.): endeff_contact[it][eff]", "= max_iterations self.eps = eps self.robot = RobotWrapper() self.reset() #", "numpy as np from momentumopt.kinoptpy.qp import QpSolver from momentumopt.kinoptpy.inverse_kinematics import", "the optimization for the initial configuration only once. if self.q_init", "if i==0: t = [0, q_via[0,0]/self.dt] poly = poly_points(t, self.q_init[j],", "(<EMAIL>) @license License BSD-3-Clause @copyright Copyright (c) 2019, New York", "self.eps = eps self.robot = RobotWrapper() self.reset() # Holds dynamics", "q_via0 = np.vstack((q_via.T, q_jump)) # self.q_via = np.vstack((q_via0, q_max.T)) joint_traj_gen", "( len(plan_joint_init_pos), self.robot.num_ctrl_joints)) q[7:] = np.matrix(plan_joint_init_pos).T q[2] = self.robot.floor_height +", "self.lmom_kin[it] = hg.linear.T self.amom_kin[it] = hg.angular.T self.q_kin[it] = q.T self.dq_kin[it]", "on the kinematic sequence. 
kinematic_state = self.kinematics_sequence.kinematics_states[it] kinematic_state.com = self.com_kin[it]", "the initial configuration q = se3.neutral(self.robot.model) plan_joint_init_pos = self.planner_setting.get( PlannerVectorParam_KinematicDefaultJointPositions)", "self.lmom_dyn[it] = self.dynamic_sequence.dynamics_states[it].lmom self.amom_dyn[it] = self.dynamic_sequence.dynamics_states[it].amom def fill_endeffector_trajectory(self): self.endeff_pos_ref, self.endeff_vel_ref,", "= np.matrix([1.35, .7*np.pi/2, -.7*np.pi, .7*np.pi/2, -.7*np.pi, -.7*np.pi/2, .7*np.pi, -.7*np.pi/2, .7*np.pi]).T", "= None def reset(self): self.kinematics_sequence = KinematicsSequence() self.kinematics_sequence.resize(self.planner_setting.get(PlannerIntParam_NumTimesteps), self.planner_setting.get(PlannerIntParam_NumDofs)) def", "but robot expects %d joints.' % ( len(plan_joint_init_pos), self.robot.num_ctrl_joints)) q[7:]", "Run the optimization for the initial configuration only once. if", "= np.vstack((q_via0, q_max.T)) joint_traj_gen = JointTrajectoryGenerator() joint_traj_gen.num_time_steps = self.num_time_steps joint_traj_gen.q_init", "return self.pos def start_time(self): return self.init_time def end_time(self): return self.final_time", "= np.matrix(np.zeros((init_state.effNum(), 3))) endeff_contact = np.ones(init_state.effNum()) quad_goal = se3.Quaternion(se3.rpy.rpyToMatrix(np.matrix([0.0, 0,", "PlannerIntParam_NumTimesteps, \\ PlannerDoubleParam_TimeStep class Contact(object): def __init__(self, position, start_time, end_time):", "trajectory # print \"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints) # print \"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints) # TODO: this", "= joint_traj_gen.eval_traj(it) # Compute inverse kinematics over the full trajectory.", "3 * self.inv_kin.ne)), 'trajectory_wrt_base': np.zeros((self.num_time_steps, 3 * self.inv_kin.ne)), 'velocity_wrt_base': np.zeros((self.num_time_steps,", 
"np.linalg.norm(res) < 1e-3: print('Found initial configuration after {} iterations'.format(iters +", "expects %d joints.' % ( len(plan_joint_init_pos), self.robot.num_ctrl_joints)) q[7:] = np.matrix(plan_joint_init_pos).T", "@file momentum_kinematics_optimizer.py @package momentumopt @author <NAME> (<EMAIL>) @license License BSD-3-Clause", "Get the desired joint trajectory # print \"num_joint_via:\",self.planner_setting.get(PlannerIntParam_NumJointViapoints) # print", "kinematic_state.robot_posture.base_position = q[:3] kinematic_state.robot_posture.base_orientation = q[3:7] kinematic_state.robot_posture.joint_positions = q[7:] kinematic_state.robot_velocity.base_linear_velocity", "np.matrix([.75, np.pi/2, -np.pi, np.pi/2, -np.pi, -np.pi/2, np.pi, -np.pi/2, np.pi]).T #", "polynominal for endeffector on the ground. t = [cnt[i].start_time(), cnt[i].end_time()]", "= None self.poly_traj = None def joint_traj(self, q_via): self.poly_traj =", "effs: cnt = contacts[eff] num_contacts = len(cnt) poly_traj = [", "and velocity trajectories. endeff_pos_ref = np.zeros((num_time_steps, num_eff, 3)) endeff_vel_ref =", "fill_data_from_dynamics(self): # The centroidal information for it in range(self.num_time_steps): self.com_dyn[it]", "self.max_bound) z_min = max(min(mom_kin_optimizer.com_dyn[:, 2]), self.min_bound) return z_max, z_min def", "sequence. 
kinematic_state = self.kinematics_sequence.kinematics_states[it] kinematic_state.com = self.com_kin[it] kinematic_state.lmom = self.lmom_kin[it]", "-.7*np.pi/2, .7*np.pi, -.7*np.pi/2, .7*np.pi]).T # q_via0 = np.vstack((q_via.T, q_jump)) #", "[1]: endeff_vel_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y, z}] ''' dt", "self.fill_endeffector_trajectory() # Run the optimization for the initial configuration only", "config file not same as required for robot\\n' + 'Got", "joint_traj(self, q_via): self.poly_traj = [] for i in range(len(self.q_init)): self.poly_traj", "# print \"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints) # TODO: this is for jump, should", "enumerate(mom_kin_optimizer.eff_names): endeff_pos_ref[it][eff] = [eff_traj_poly[name][i].eval(it * dt) for i in range(3)]", "j in range(len(self.q_init)): q[0,j] = self.poly_traj[j].eval(t) return np.matrix(q) class MomentumKinematicsOptimizer(object):", "= np.zeros((self.num_time_steps, 3)) self.lmom_dyn = np.zeros((self.num_time_steps, 3)) self.amom_dyn = np.zeros((self.num_time_steps,", "planner_setting if endeff_traj_generator is None: endeff_traj_generator = EndeffectorTrajectoryGenerator() self.endeff_traj_generator =", "Adding small P controller for the base orientation to always", "RobotWrapper() self.reset() # Holds dynamics and kinematics results self.com_dyn =", "None def joint_traj(self, q_via): self.poly_traj = [] for i in", "init_state): # Optimize the initial configuration q = se3.neutral(self.robot.model) plan_joint_init_pos", "idx).dot(dq)[:3] for idx in frames ]).reshape(-1) data = self.inv_kin.robot.data hg", "= {} for i, eff in enumerate(effs): num_contacts = len(contact_states(i))", "max(min(mom_kin_optimizer.com_dyn[:, 2]), self.min_bound) return z_max, z_min def __call__(self, mom_kin_optimizer): '''", "constant polynominal for endeffector on the ground. 
t = [cnt[i].start_time(),", "= min(max(mom_kin_optimizer.com_dyn[:, 2]), self.max_bound) z_min = max(min(mom_kin_optimizer.com_dyn[:, 2]), self.min_bound) return", "endeff_vel_ref, endeff_contact class JointTrajectoryGenerator(object): def __init__(self): self.dt =.01 self.num_time_steps =", "[] for i in range(len(self.q_init)): self.poly_traj = np.append(self.poly_traj, [PolynominalList()]) for", "self.amom_dyn[it] = self.dynamic_sequence.dynamics_states[it].amom def fill_endeffector_trajectory(self): self.endeff_pos_ref, self.endeff_vel_ref, self.endeff_contact = \\", "= poly_points(t, cnt[i].position()[idx], cnt[i+1].position()[idx], via) poly_traj[idx].append(t, poly) eff_traj_poly[eff] = poly_traj", "momentumopt.kinoptpy.qp import QpSolver from momentumopt.kinoptpy.inverse_kinematics import PointContactInverseKinematics from pinocchio import", "self.q_kin = np.zeros((self.num_time_steps, self.robot.model.nq)) self.dq_kin = np.zeros((self.num_time_steps, self.robot.model.nv)) self.hip_names =", "= ['{}_{}'.format(eff, self.robot.joints_list[-1]) for eff in self.robot.effs] self.inv_kin = PointContactInverseKinematics(self.robot.model,", "RobotWrapper=QuadrupedWrapper): self.planner_setting = planner_setting if endeff_traj_generator is None: endeff_traj_generator =", "* self.inv_kin.ne)) } def fill_data_from_dynamics(self): # The centroidal information for", "from momentumopt.kinoptpy.inverse_kinematics import PointContactInverseKinematics from pinocchio import RobotWrapper import pinocchio", "[cnt[i].start_time(), cnt[i].end_time()] for idx in range(3): poly_traj[idx].append(t, constant_poly(cnt[i].position()[idx])) # If", "pinocchio as se3 from pinocchio.utils import zero from pymomentum import", "= self.reg_joint_position * (self.q_init[7 : ] - q[7 : ])", "y, z}] [1]: endeff_vel_ref: np.array, shape=[num_time_steps, num_eff, 3={x, y, z}]", "# Compute the endeffector position and velocity trajectories. 
endeff_pos_ref =", "kinematic_state = self.kinematics_sequence.kinematics_states[it] kinematic_state.com = self.com_kin[it] kinematic_state.lmom = self.lmom_kin[it] kinematic_state.amom", "iters == self.max_iterations - 1: print('Failed to converge for initial", "return np.vstack([data.oMf[idx].translation for idx in frames]).reshape(-1) def framesVel(frames): return np.vstack([", "endeffector informations as well. self.motion_eff['trajectory'][it] = framesPos(self.inv_kin.endeff_ids) self.motion_eff['velocity'][it] = self.inv_kin.J[6:(self.inv_kin.ne", "the next state. q = se3.integrate(self.robot.model, q, dq * self.dt)", "well. self.motion_eff['trajectory'][it] = framesPos(self.inv_kin.endeff_ids) self.motion_eff['velocity'][it] = self.inv_kin.J[6:(self.inv_kin.ne + 2) *", "np from momentumopt.kinoptpy.qp import QpSolver from momentumopt.kinoptpy.inverse_kinematics import PointContactInverseKinematics from", "initial configuration only once. if self.q_init is None: self.optimize_initial_position(init_state) #", "EndeffectorTrajectoryGenerator() self.endeff_traj_generator = endeff_traj_generator self.dt = planner_setting.get(PlannerDoubleParam_TimeStep) self.num_time_steps = planner_setting.get(PlannerIntParam_NumTimesteps)", "init_state, contact_sequence, dynamic_sequence, plotting=False): self.init_state = init_state self.contact_sequence = contact_sequence", "self.robot.effs] self.hip_ids = [self.robot.model.getFrameId(name) for name in self.hip_names] self.eff_names =", "kinematic_state.robot_velocity.base_linear_velocity = dq[:3] kinematic_state.robot_velocity.base_angular_velocity = dq[3:6] kinematic_state.robot_velocity.joint_velocities = dq[6:] def", "# The centroidal information for it in range(self.num_time_steps): self.com_dyn[it] =", "return z_max, z_min def __call__(self, mom_kin_optimizer): ''' Computes the endeffector", "i in range(3)] # HACK: If the velocity is zero,", "print('Failed to converge for initial setup.') print(\"initial configuration: 
\\n\", q)", "= [] for j in range(num_contacts): contact_ = contact_states(i)[j] start_time", "quad_q.inverse()).matrix()) res = self.inv_kin.compute(q, dq, com_ref, lmom_ref, amom_ref, endeff_pos_ref, endeff_vel_ref,", "t = [q_via[i-1,0]/self.dt, self.num_time_steps] poly = poly_points(t, q_via[i-1,j+1], self.q_init[j]) self.poly_traj[j].append(t,", "for i in range(len(self.q_init)): self.poly_traj = np.append(self.poly_traj, [PolynominalList()]) for j", "self.lmom_dyn = np.zeros((self.num_time_steps, 3)) self.amom_dyn = np.zeros((self.num_time_steps, 3)) self.com_kin =", "self.hip_names] self.eff_names = ['{}_{}'.format(eff, self.robot.joints_list[-1]) for eff in self.robot.effs] self.inv_kin", "this is for jump, should go to config file #", "= self.dynamic_sequence.dynamics_states[it].amom def fill_endeffector_trajectory(self): self.endeff_pos_ref, self.endeff_vel_ref, self.endeff_contact = \\ self.endeff_traj_generator(self)", "np.pi/2, -np.pi, np.pi/2, -np.pi, -np.pi/2, np.pi, -np.pi/2, np.pi]).T # q_max", "self.reg_joint_position * (self.q_init[7 : ] - q[7 : ]) #", "self.robot.effs] self.inv_kin = PointContactInverseKinematics(self.robot.model, self.eff_names) self.motion_eff = { 'trajectory': np.zeros((self.num_time_steps,", "between # the two contact points. if i < num_contacts", "''' Computes the endeffector positions and velocities. Returns endeff_pos_ref, endeff_vel_ref", "poly_traj[idx].append(t, constant_poly(cnt[i].position()[idx])) # If there is a contact following, add", "PlannerDoubleParam_TimeStep class Contact(object): def __init__(self, position, start_time, end_time): self.pos =", "as well. 
self.motion_eff['trajectory'][it] = framesPos(self.inv_kin.endeff_ids) self.motion_eff['velocity'][it] = self.inv_kin.J[6:(self.inv_kin.ne + 2)", "= framesPos(self.inv_kin.endeff_ids) self.motion_eff['velocity'][it] = self.inv_kin.J[6:(self.inv_kin.ne + 2) * 3].dot(dq).T self.motion_eff['trajectory_wrt_base'][it]", "\"joint_via:\",self.planner_setting.get(PlannerCVectorParam_JointViapoints) # TODO: this is for jump, should go to", "class EndeffectorTrajectoryGenerator(object): def __init__(self): self.z_offset = 0.1 def get_z_bound(self, mom_kin_optimizer):", "dynamic_sequence, plotting=False): self.init_state = init_state self.contact_sequence = contact_sequence self.dynamic_sequence =" ]
[ "logging from gullveig import bootstrap_default_logger # Configure default logging def", "<reponame>Addvilz/gullveig import logging from gullveig import bootstrap_default_logger # Configure default", "import bootstrap_default_logger # Configure default logging def _configure_default_web_logger(): logger =", "import logging from gullveig import bootstrap_default_logger # Configure default logging", "bootstrap_default_logger # Configure default logging def _configure_default_web_logger(): logger = logging.getLogger('gullveig-web')", "logger = logging.getLogger('gullveig-web') bootstrap_default_logger(logger) api_logger = logging.getLogger('gullveig-api') bootstrap_default_logger(api_logger) aio_logger =", "bootstrap_default_logger(logger) api_logger = logging.getLogger('gullveig-api') bootstrap_default_logger(api_logger) aio_logger = logging.getLogger('aiohttp.server') bootstrap_default_logger(aio_logger) _configure_default_web_logger()", "def _configure_default_web_logger(): logger = logging.getLogger('gullveig-web') bootstrap_default_logger(logger) api_logger = logging.getLogger('gullveig-api') bootstrap_default_logger(api_logger)", "= logging.getLogger('gullveig-web') bootstrap_default_logger(logger) api_logger = logging.getLogger('gullveig-api') bootstrap_default_logger(api_logger) aio_logger = logging.getLogger('aiohttp.server')", "logging def _configure_default_web_logger(): logger = logging.getLogger('gullveig-web') bootstrap_default_logger(logger) api_logger = logging.getLogger('gullveig-api')", "Configure default logging def _configure_default_web_logger(): logger = logging.getLogger('gullveig-web') bootstrap_default_logger(logger) api_logger", "# Configure default logging def _configure_default_web_logger(): logger = logging.getLogger('gullveig-web') bootstrap_default_logger(logger)", "logging.getLogger('gullveig-web') bootstrap_default_logger(logger) api_logger = logging.getLogger('gullveig-api') bootstrap_default_logger(api_logger) aio_logger = 
logging.getLogger('aiohttp.server') bootstrap_default_logger(aio_logger)", "_configure_default_web_logger(): logger = logging.getLogger('gullveig-web') bootstrap_default_logger(logger) api_logger = logging.getLogger('gullveig-api') bootstrap_default_logger(api_logger) aio_logger", "gullveig import bootstrap_default_logger # Configure default logging def _configure_default_web_logger(): logger", "default logging def _configure_default_web_logger(): logger = logging.getLogger('gullveig-web') bootstrap_default_logger(logger) api_logger =", "from gullveig import bootstrap_default_logger # Configure default logging def _configure_default_web_logger():" ]
[ "Http authentication server. \"\"\" ) appid = Unicode( None, allow_none=True,", "import json import urllib import os import jupyterhub from tornado.httpclient", "import gen class HttpAuthenticator(Authenticator): server = Unicode( None, allow_none=True, config=True,", ") @gen.coroutine def authenticate(self, handler, data): http_client = AsyncHTTPClient() headers", "\"Accept\": \"application/json\", \"User-Agent\": \"JupyterHub\", } params = dict( type=\"json\", appid=self.appid,", "= HTTPRequest(self.server, method=\"POST\", headers=headers, body=urllib.parse.urlencode(params), validate_cert = False ) resp", "jupyterhub from tornado.httpclient import HTTPRequest, AsyncHTTPClient from traitlets import Unicode", "\"User-Agent\": \"JupyterHub\", } params = dict( type=\"json\", appid=self.appid, ac=data['username'], pw=data['password']", "help=\"\"\" Application Id recognized by the http authentication server \"\"\"", "= dict( type=\"json\", appid=self.appid, ac=data['username'], pw=data['password'] ) req = HTTPRequest(self.server,", "'replace')) if reply.get(\"code\") == 200: return (reply.get(\"data\").get(\"UserCN\")) else: return None", "Unicode from jupyterhub.auth import Authenticator from tornado import gen class", "= AsyncHTTPClient() headers = { \"Accept\": \"application/json\", \"User-Agent\": \"JupyterHub\", }", ") resp = yield http_client.fetch(req) reply = json.loads(resp.body.decode('utf8', 'replace')) if", "HTTPRequest, AsyncHTTPClient from traitlets import Unicode from jupyterhub.auth import Authenticator", "Application Id recognized by the http authentication server \"\"\" )", "None, allow_none=True, config=True, help=\"\"\" Http authentication server. 
\"\"\" ) appid", "{ \"Accept\": \"application/json\", \"User-Agent\": \"JupyterHub\", } params = dict( type=\"json\",", "params = dict( type=\"json\", appid=self.appid, ac=data['username'], pw=data['password'] ) req =", "None, allow_none=True, config=True, help=\"\"\" Application Id recognized by the http", "http authentication server \"\"\" ) @gen.coroutine def authenticate(self, handler, data):", "allow_none=True, config=True, help=\"\"\" Application Id recognized by the http authentication", "method=\"POST\", headers=headers, body=urllib.parse.urlencode(params), validate_cert = False ) resp = yield", "from traitlets import Unicode from jupyterhub.auth import Authenticator from tornado", ") req = HTTPRequest(self.server, method=\"POST\", headers=headers, body=urllib.parse.urlencode(params), validate_cert = False", "def authenticate(self, handler, data): http_client = AsyncHTTPClient() headers = {", "\"JupyterHub\", } params = dict( type=\"json\", appid=self.appid, ac=data['username'], pw=data['password'] )", "@gen.coroutine def authenticate(self, handler, data): http_client = AsyncHTTPClient() headers =", "Id recognized by the http authentication server \"\"\" ) @gen.coroutine", "resp = yield http_client.fetch(req) reply = json.loads(resp.body.decode('utf8', 'replace')) if reply.get(\"code\")", "Authenticator from tornado import gen class HttpAuthenticator(Authenticator): server = Unicode(", "import Authenticator from tornado import gen class HttpAuthenticator(Authenticator): server =", "authentication server. 
\"\"\" ) appid = Unicode( None, allow_none=True, config=True,", "AsyncHTTPClient from traitlets import Unicode from jupyterhub.auth import Authenticator from", "config=True, help=\"\"\" Application Id recognized by the http authentication server", "handler, data): http_client = AsyncHTTPClient() headers = { \"Accept\": \"application/json\",", "validate_cert = False ) resp = yield http_client.fetch(req) reply =", "traitlets import Unicode from jupyterhub.auth import Authenticator from tornado import", "appid=self.appid, ac=data['username'], pw=data['password'] ) req = HTTPRequest(self.server, method=\"POST\", headers=headers, body=urllib.parse.urlencode(params),", "= { \"Accept\": \"application/json\", \"User-Agent\": \"JupyterHub\", } params = dict(", "= json.loads(resp.body.decode('utf8', 'replace')) if reply.get(\"code\") == 200: return (reply.get(\"data\").get(\"UserCN\")) else:", "gen class HttpAuthenticator(Authenticator): server = Unicode( None, allow_none=True, config=True, help=\"\"\"", "by the http authentication server \"\"\" ) @gen.coroutine def authenticate(self,", "recognized by the http authentication server \"\"\" ) @gen.coroutine def", "False ) resp = yield http_client.fetch(req) reply = json.loads(resp.body.decode('utf8', 'replace'))", "\"application/json\", \"User-Agent\": \"JupyterHub\", } params = dict( type=\"json\", appid=self.appid, ac=data['username'],", "http_client.fetch(req) reply = json.loads(resp.body.decode('utf8', 'replace')) if reply.get(\"code\") == 200: return", "reply = json.loads(resp.body.decode('utf8', 'replace')) if reply.get(\"code\") == 200: return (reply.get(\"data\").get(\"UserCN\"))", "\"\"\" ) @gen.coroutine def authenticate(self, handler, data): http_client = AsyncHTTPClient()", ") appid = Unicode( None, allow_none=True, config=True, help=\"\"\" Application Id", "\"\"\" ) appid = Unicode( None, allow_none=True, config=True, help=\"\"\" Application", "jupyterhub.auth import Authenticator from tornado import gen class 
HttpAuthenticator(Authenticator): server", "body=urllib.parse.urlencode(params), validate_cert = False ) resp = yield http_client.fetch(req) reply", "Unicode( None, allow_none=True, config=True, help=\"\"\" Application Id recognized by the", "allow_none=True, config=True, help=\"\"\" Http authentication server. \"\"\" ) appid =", "} params = dict( type=\"json\", appid=self.appid, ac=data['username'], pw=data['password'] ) req", "authenticate(self, handler, data): http_client = AsyncHTTPClient() headers = { \"Accept\":", "= False ) resp = yield http_client.fetch(req) reply = json.loads(resp.body.decode('utf8',", "pw=data['password'] ) req = HTTPRequest(self.server, method=\"POST\", headers=headers, body=urllib.parse.urlencode(params), validate_cert =", "= Unicode( None, allow_none=True, config=True, help=\"\"\" Http authentication server. \"\"\"", "http_client = AsyncHTTPClient() headers = { \"Accept\": \"application/json\", \"User-Agent\": \"JupyterHub\",", "from tornado import gen class HttpAuthenticator(Authenticator): server = Unicode( None,", "import urllib import os import jupyterhub from tornado.httpclient import HTTPRequest,", "import HTTPRequest, AsyncHTTPClient from traitlets import Unicode from jupyterhub.auth import", "import os import jupyterhub from tornado.httpclient import HTTPRequest, AsyncHTTPClient from", "the http authentication server \"\"\" ) @gen.coroutine def authenticate(self, handler,", "headers=headers, body=urllib.parse.urlencode(params), validate_cert = False ) resp = yield http_client.fetch(req)", "from tornado.httpclient import HTTPRequest, AsyncHTTPClient from traitlets import Unicode from", "json.loads(resp.body.decode('utf8', 'replace')) if reply.get(\"code\") == 200: return (reply.get(\"data\").get(\"UserCN\")) else: return", "ac=data['username'], pw=data['password'] ) req = HTTPRequest(self.server, method=\"POST\", headers=headers, body=urllib.parse.urlencode(params), validate_cert", "server = Unicode( None, allow_none=True, 
config=True, help=\"\"\" Http authentication server.", "server. \"\"\" ) appid = Unicode( None, allow_none=True, config=True, help=\"\"\"", "server \"\"\" ) @gen.coroutine def authenticate(self, handler, data): http_client =", "HTTPRequest(self.server, method=\"POST\", headers=headers, body=urllib.parse.urlencode(params), validate_cert = False ) resp =", "appid = Unicode( None, allow_none=True, config=True, help=\"\"\" Application Id recognized", "yield http_client.fetch(req) reply = json.loads(resp.body.decode('utf8', 'replace')) if reply.get(\"code\") == 200:", "headers = { \"Accept\": \"application/json\", \"User-Agent\": \"JupyterHub\", } params =", "HttpAuthenticator(Authenticator): server = Unicode( None, allow_none=True, config=True, help=\"\"\" Http authentication", "urllib import os import jupyterhub from tornado.httpclient import HTTPRequest, AsyncHTTPClient", "from jupyterhub.auth import Authenticator from tornado import gen class HttpAuthenticator(Authenticator):", "class HttpAuthenticator(Authenticator): server = Unicode( None, allow_none=True, config=True, help=\"\"\" Http", "data): http_client = AsyncHTTPClient() headers = { \"Accept\": \"application/json\", \"User-Agent\":", "Unicode( None, allow_none=True, config=True, help=\"\"\" Http authentication server. 
\"\"\" )", "tornado import gen class HttpAuthenticator(Authenticator): server = Unicode( None, allow_none=True,", "authentication server \"\"\" ) @gen.coroutine def authenticate(self, handler, data): http_client", "req = HTTPRequest(self.server, method=\"POST\", headers=headers, body=urllib.parse.urlencode(params), validate_cert = False )", "tornado.httpclient import HTTPRequest, AsyncHTTPClient from traitlets import Unicode from jupyterhub.auth", "type=\"json\", appid=self.appid, ac=data['username'], pw=data['password'] ) req = HTTPRequest(self.server, method=\"POST\", headers=headers,", "dict( type=\"json\", appid=self.appid, ac=data['username'], pw=data['password'] ) req = HTTPRequest(self.server, method=\"POST\",", "import jupyterhub from tornado.httpclient import HTTPRequest, AsyncHTTPClient from traitlets import", "= Unicode( None, allow_none=True, config=True, help=\"\"\" Application Id recognized by", "json import urllib import os import jupyterhub from tornado.httpclient import", "help=\"\"\" Http authentication server. \"\"\" ) appid = Unicode( None,", "= yield http_client.fetch(req) reply = json.loads(resp.body.decode('utf8', 'replace')) if reply.get(\"code\") ==", "AsyncHTTPClient() headers = { \"Accept\": \"application/json\", \"User-Agent\": \"JupyterHub\", } params", "import Unicode from jupyterhub.auth import Authenticator from tornado import gen", "os import jupyterhub from tornado.httpclient import HTTPRequest, AsyncHTTPClient from traitlets", "config=True, help=\"\"\" Http authentication server. \"\"\" ) appid = Unicode(" ]
[ "is already on GPU if you use use_gpu. Arguments:- data_loader", "stepper object and then use one of the above annelaing", "return_fig:bool=None): \"\"\" It will take the losses and lrs returned", "# NOT TO BE MODIFIED def lr_range(model, lr): \"\"\" Build", "x in losses] fig, ax = plt.subplots(1, 1) ax.plot(lrs, losses)", "* (end-start) def annealing_exp(start, end, pct:float): \"Exponentially anneal from `start`", "TO BE MODIFIED class SmoothenValue(): \"Create a smooth moving average", "= [lr.stop/10.]*(num_layer-1) + [lr.stop] return np.array(res) # NOT TO BE", "for p in c.parameters()] for c in m.children()],[]) for p", "beta:float): self.beta,self.n,self.mov_avg = beta,0,0 def add_value(self, val:float)->None: \"Add `val` to", "learning_rate vs losses graph. It is the only function from", "step = mult**(1/(num_layer-1)) res = np.array([lr.start*(step**i) for i in range(num_layer)])", "BE MODIFIED class Stepper(): \"\"\" Used to step from start,", "Stepper(): \"\"\" Used to step from start, end ('vals') over", "The learning rate at which to end lr_find (default=10) num_it", "in c.parameters()] for c in m.children()],[]) for p in m.parameters():", "== 0 or smooth_loss < best_loss: best_loss = smooth_loss iteration", "modules.\" children = list(m.children()) children_p = sum([[id(p) for p in", "start, end ('vals') over 'n_iter' iterations on a schedule. We", "you give inputs to your model. inputs, labels = data", "annealing_exp(start, end, pct:float): \"Exponentially anneal from `start` to `end` as", "-> If you want to see the point where the", "use for training model (default GPU) Returns: losses :- list", "use_gpu. Arguments:- data_loader :- torch.utils.data.DataLoader model :- torch.nn.Module loss_fn :-", "TO BE MODIFIED class Stepper(): \"\"\" Used to step from", "plot learning_rate vs losses graph. 
It is the only function", "else start_lr end_lr = lr_range(model, end_lr) end_lr = np.array(end_lr) if", "early (default=True) smooth_beta :- The beta value to smoothen the", "torch import torch.nn as nn import numpy as np import", "if suggestion: try: mg = (np.gradient(np.array(losses))).argmin() except: print(\"Failed to compute", "model(inputs) loss = loss_fn(outputs, labels) ##################################################### if use_gpu: smoothener.add_value(loss.detach().cpu()) else:", "opt.zero_grad() ################### TO BE MODIFIED ################### # Depending on your", "not be enough points.\") return print(f\"Min numerical gradient: {lrs[mg]:.2E}\") ax.plot(lrs[mg],", "lr. \"\"\" def __init__(self, vals, n_iter:int, func=None): self.start, self.end =", "lrs returned by lr_find as input. Arguments:- skip_start -> It", "loss function (default=0.98) use_gpu :- True (train on GPU) else", "NOT -> flatten_model # NOT -> lr_range # NOT ->", "return end + (start-end) * (1-pct)**degree # NOT TO BE", "ax.plot(lrs[mg], losses[mg], markersize=10, marker='o', color='red') if return_fig is not None:", "lrs from the end suggestion -> If you want to", "parameters not registered in modules.\" children = list(m.children()) children_p =", "comment these lines for group in opt.param_groups: for param in", "start skip_end -> It will skip skip_end lrs from the", "If you want to see the point where the gradient", "Build differential learning rate from lr. It will give you", "= sched.start for i in range(epochs): for data in data_loader:", "want to change it. 
But in cases # when you", "YES -> lr_find # NOT -> plot_lr_find # NOT TO", "end + (start-end)/2 * cos_out def do_annealing_poly(start, end, pct:float, degree):", "lr new_lr = sched.step() lrs.append(new_lr) for group in opt.param_groups: group['lr']", "inputs.to(device) labels = labels.to(device) outputs = model(inputs) loss = loss_fn(outputs,", "end_lr), num_it, anneal_func) smoothener = SmoothenValue(smooth_beta) epochs = int(np.ceil(num_it/len(data_loader))) #", "# Set optimizer learning_rate = start_lr for group in opt.param_groups:", "want to use Adam, comment these lines for group in", "True break ##################################################### if iteration%10 == 0: print(f'Iteration: {iteration}') if", "would give us the values of lr. Liks for linearly", "lr to end lr. \"\"\" def __init__(self, vals, n_iter:int, func=None):", "You necessarily don't want to change it. But in cases", "children.append(ParameterModule(p)) return children # NOT TO BE MODIFIED flatten_model =", "################### # For AdamW. If you want to use Adam,", "`start` to `end` as pct goes from 0.0 to 1.0.\"", "opt, wd:int=0, start_lr:float=1e-7, end_lr:float=10, num_it:int=100, stop_div:bool=True, smooth_beta:float=0.98, use_gpu:bool=True, device=torch.device('cuda'), anneal_func=annealing_exp):", "rate from where to start in lr_find (default=1e-7) end_lr :-", "in opt.param_groups: group['lr'] = sched.start for i in range(epochs): for", "= (np.gradient(np.array(losses))).argmin() except: print(\"Failed to compute the gradients, there might", "your model. inputs, labels = data if use_gpu: inputs =", "the only function from lr_find.py that you will call. By", ":- torch.optim.Optimizer wd :- weight decay (default=0). 
start_lr :- The", "iterations for lr_find (default=100) stop_div :- If the loss diverges,", "\"No annealing, always return `start`.\" return start def annealing_linear(start, end,", "= 0 losses = [] lrs = [] lrs.append(start_lr) start_lr", "lr_range(model, start_lr) start_lr = np.array(start_lr) if isinstance(start_lr, (tuple, list)) else", "it will use GPU. It assumes your model is already", "param in group['params']: param.data = param.data.add(-wd * group['lr'], param.data) #####################################################", "input. Arguments:- skip_start -> It will skip skip_start lrs from", "These are the functions that would give us the values", "losses lrs :- list of all lrs that we test", "def annealing_linear(start, end, pct:float): \"Linearly anneal from `start` to `end`", "= [] lrs = [] lrs.append(start_lr) start_lr = lr_range(model, start_lr)", "statement \"\"\" lrs = lrs[skip_start:-skip_end] if skip_end > 0 else", "# To be used to flatten_model def children_and_parameters(m:nn.Module): \"Return the", "we test \"\"\" model.train() stop = False flag = False", "iteration = 0 losses = [] lrs = [] lrs.append(start_lr)", "numpy as np import matplotlib.pyplot as plt # NOT ->", "linearly # increasing lr we would use annealing_linear. # You", "on your model, you will have to modify your #", "= np.array([lr.start*(step**i) for i in range(num_layer)]) else: res = [lr.stop/10.]*(num_layer-1)", "for p in m.parameters(): if id(p) not in children_p: children.append(ParameterModule(p))", "producing lr. 
# By defualt annealing_exp is used for both", "Stepper((start_lr, end_lr), num_it, anneal_func) smoothener = SmoothenValue(smooth_beta) epochs = int(np.ceil(num_it/len(data_loader)))", "BE MODIFIED # These are the functions that would give", "end, pct:float, degree): return end + (start-end) * (1-pct)**degree #", "end + (start-end) * (1-pct)**degree # NOT TO BE MODIFIED", "on GPU) else CPU anneal_func :- The step function you", "see the point where the gradient changes most return_fig ->", "# TO BE MODIFIED IN SOME CASES def lr_find(data_loader, model,", "SOME CASES def lr_find(data_loader, model, loss_fn, opt, wd:int=0, start_lr:float=1e-7, end_lr:float=10,", "of losses lrs :- list of all lrs that we", "lr we would use annealing_linear. # You can add your", "skip_start -> It will skip skip_start lrs from the start", "{iteration}') if flag: break # Load state dict model.load_state_dict(model_state) opt.load_state_dict(opt_state)", "at which to end lr_find (default=10) num_it :- Number of", "mult**(1/(num_layer-1)) res = np.array([lr.start*(step**i) for i in range(num_layer)]) else: res", "model.load_state_dict(model_state) opt.load_state_dict(opt_state) lrs.pop() print(f'LR Finder is complete.') return losses, lrs", "use annealing_linear. # You can add your own custom function,", "children_and_parameters(m:nn.Module): \"Return the children of `m` and its direct parameters", "to step from start lr to end lr. \"\"\" def", "use use_gpu. Arguments:- data_loader :- torch.utils.data.DataLoader model :- torch.nn.Module loss_fn", "##################################################### opt.step() # Change lr new_lr = sched.step() lrs.append(new_lr) for", "0 else losses[skip_start:] losses = [x.item() for x in losses]", "ax.set_ylabel(\"Loss\") ax.set_xlabel(\"Learning Rate\") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg =", "smoothened version of losses lrs :- list of all lrs", "for producing lr. 
# By defualt annealing_exp is used for", "lrs = [] lrs.append(start_lr) start_lr = lr_range(model, start_lr) start_lr =", "pct def annealing_cos(start, end, pct:float): \"Cosine anneal from `start` to", "TO BE MODIFIED ################### # You necessarily don't want to", "# You can add your own custom function, for producing", "else [m] # NOT TO BE MODIFIED def lr_range(model, lr):", "iterations on a schedule. We will create a stepper object", "BE MODIFIED ################### # Depending on your model, you will", "+ (start-end) * (1-pct)**degree # NOT TO BE MODIFIED class", "annelaing functions, to step from start lr to end lr.", "model.train() stop = False flag = False best_loss = 0.", "losses = losses[skip_start:-skip_end] if skip_end > 0 else losses[skip_start:] losses", "i in range(num_layer)]) else: res = [lr.stop/10.]*(num_layer-1) + [lr.stop] return", "= p def forward(self, x): return x # NOT TO", "updated smoothed value.\" self.n += 1 self.mov_avg = self.beta *", "def plot_lr_find(losses, lrs, skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None): \"\"\" It will", "to use (default exp) device :- Torch device to use", "# Depending on your model, you will have to modify", "functions, to step from start lr to end lr. \"\"\"", "None: self.func = annealing_linear if isinstance(vals, tuple) else annealing_no else:", "group['lr'] = sched.start for i in range(epochs): for data in", "pct:float): \"Linearly anneal from `start` to `end` as pct goes", "data_loader: opt.zero_grad() ################### TO BE MODIFIED ################### # Depending on", "of lr. Liks for linearly # increasing lr we would", "return end + (start-end)/2 * cos_out def do_annealing_poly(start, end, pct:float,", "\"Cosine anneal from `start` to `end` as pct goes from", "loss.backward() ################### TO BE MODIFIED ################### # For AdamW. 
If", "momentum def annealing_no(start, end, pct:float): \"No annealing, always return `start`.\"", "return self.func(self.start, self.end, self.n/self.n_iter) @property def is_done(self)->bool: \"Return 'True' if", "epochs = int(np.ceil(num_it/len(data_loader))) # save model_dict model_state = model.state_dict() opt_state", "iteration += 1 if sched.is_done or (stop_div and (smooth_loss >", "how you give inputs to your model. inputs, labels =", "To be used to flatten_model def children_and_parameters(m:nn.Module): \"Return the children", "* pct) + 1 return end + (start-end)/2 * cos_out", "\"\"\" Used to step from start, end ('vals') over 'n_iter'", "NOT TO BE MODIFIED class SmoothenValue(): \"Create a smooth moving", "model, loss_fn, opt, wd:int=0, start_lr:float=1e-7, end_lr:float=10, num_it:int=100, stop_div:bool=True, smooth_beta:float=0.98, use_gpu:bool=True,", "# NOT -> plot_lr_find # NOT TO BE MODIFIED class", "avergae of the loss function (default=0.98) use_gpu :- True (train", "num_layer = len([nn.Sequential(*flatten_model(model))]) if lr.start: mult = lr.stop / lr.start", "model (default GPU) Returns: losses :- list of smoothened version", "TO BE MODIFIED ################### # For AdamW. 
If you want", "lrs[skip_start:-skip_end] if skip_end > 0 else lrs[skip_start:] losses = losses[skip_start:-skip_end]", ":- torch.nn.Module lr :- float or slice Returns: Depending upon", "above annelaing functions, to step from start lr to end", "NOT -> scheduling functions # NOT -> SmoothenValue # YES", "0 def step(self): \"Return next value along annealed schedule\" self.n", "It assumes your model is already on GPU if you", "smooth_beta :- The beta value to smoothen the running avergae", "################### # Depending on your model, you will have to", "Arguments:- skip_start -> It will skip skip_start lrs from the", "start def annealing_linear(start, end, pct:float): \"Linearly anneal from `start` to", "annealing_exp is used for both lr and momentum def annealing_no(start,", "(vals,0) self.n_iter = max(1, n_iter) if func is None: self.func", "lr.start: mult = lr.stop / lr.start step = mult**(1/(num_layer-1)) res", "= np.array(end_lr) if isinstance(end_lr, (tuple, list)) else end_lr sched =", "pct:float): \"Exponentially anneal from `start` to `end` as pct goes", "= param.data.add(-wd * group['lr'], param.data) ##################################################### opt.step() # Change lr", "take the losses and lrs returned by lr_find as input.", "if id(p) not in children_p: children.append(ParameterModule(p)) return children # NOT", "lr \"\"\" if not isinstance(lr, slice): return lr num_layer =", "(default=100) stop_div :- If the loss diverges, then stop early", "break # Load state dict model.load_state_dict(model_state) opt.load_state_dict(opt_state) lrs.pop() print(f'LR Finder", "children_and_parameters # NOT -> flatten_model # NOT -> lr_range #", "NOT -> lr_range # NOT -> scheduling functions # NOT", "start_lr:float=1e-7, end_lr:float=10, num_it:int=100, stop_div:bool=True, smooth_beta:float=0.98, use_gpu:bool=True, device=torch.device('cuda'), anneal_func=annealing_exp): \"\"\" The", "version of losses lrs :- list of all lrs that", "np.array(end_lr) if 
isinstance(end_lr, (tuple, list)) else end_lr sched = Stepper((start_lr,", "you will have to modify your # data pipeline and", "in a module\" def __init__(self, p:nn.Parameter): super().__init__() self.val = p", "assumes your model is already on GPU if you use", "(default=10) num_it :- Number of iterations for lr_find (default=100) stop_div", "param.data.add(-wd * group['lr'], param.data) ##################################################### opt.step() # Change lr new_lr", "else annealing_no else: self.func = func self.n = 0 def", "'n_iter' iterations on a schedule. We will create a stepper", "torch.nn.Module loss_fn :- torch.nn.LossFunction opt :- torch.optim.Optimizer wd :- weight", "children_p: children.append(ParameterModule(p)) return children # NOT TO BE MODIFIED flatten_model", "lr.start step = mult**(1/(num_layer-1)) res = np.array([lr.start*(step**i) for i in", "pct:float): \"Cosine anneal from `start` to `end` as pct goes", "Arguments: model :- torch.nn.Module lr :- float or slice Returns:", "if isinstance(vals, tuple) else (vals,0) self.n_iter = max(1, n_iter) if", "learning rate at which to end lr_find (default=10) num_it :-", "if not isinstance(lr, slice): return lr num_layer = len([nn.Sequential(*flatten_model(model))]) if", "maximizing the loss, then you will have # to change", "4*best_loss or torch.isnan(loss))): flag = True break ##################################################### if iteration%10", "# NOT TO BE MODIFIED def plot_lr_find(losses, lrs, skip_start:int=10, skip_end:int=5,", "upon lr \"\"\" if not isinstance(lr, slice): return lr num_layer", "data if use_gpu: inputs = inputs.to(device) labels = labels.to(device) outputs", "model :- torch.nn.Module lr :- float or slice Returns: Depending", "(smooth_loss > 4*best_loss or torch.isnan(loss))): flag = True break #####################################################", "BE MODIFIED IN SOME CASES def lr_find(data_loader, model, loss_fn, opt,", "self.beta) * val self.smooth = self.mov_avg / (1 
- self.beta", "torch.nn.LossFunction opt :- torch.optim.Optimizer wd :- weight decay (default=0). start_lr", "end_lr:float=10, num_it:int=100, stop_div:bool=True, smooth_beta:float=0.98, use_gpu:bool=True, device=torch.device('cuda'), anneal_func=annealing_exp): \"\"\" The main", ":- The beta value to smoothen the running avergae of", "################### # You necessarily don't want to change it. But", "loss diverges, then stop early (default=True) smooth_beta :- The beta", "for linearly # increasing lr we would use annealing_linear. #", "to 1.0.\" return start * (end/start) ** pct def annealing_cos(start,", "or (stop_div and (smooth_loss > 4*best_loss or torch.isnan(loss))): flag =", "-> scheduling functions # NOT -> SmoothenValue # YES ->", "(vals[0], vals[1]) if isinstance(vals, tuple) else (vals,0) self.n_iter = max(1,", "opt.step() # Change lr new_lr = sched.step() lrs.append(new_lr) for group", "int(np.ceil(num_it/len(data_loader))) # save model_dict model_state = model.state_dict() opt_state = opt.state_dict()", "step function you want to use (default exp) device :-", "if sched.is_done or (stop_div and (smooth_loss > 4*best_loss or torch.isnan(loss))):", "self.func = annealing_linear if isinstance(vals, tuple) else annealing_no else: self.func", "start_lr) start_lr = np.array(start_lr) if isinstance(start_lr, (tuple, list)) else start_lr", "-> SmoothenValue # YES -> lr_find # NOT -> plot_lr_find", "################### TO BE MODIFIED ################### # You necessarily don't want", "etc) using `beta`.\" def __init__(self, beta:float): self.beta,self.n,self.mov_avg = beta,0,0 def", "It will skip skip_start lrs from the start skip_end ->", "return np.array(res) # NOT TO BE MODIFIED # These are", "return statement \"\"\" lrs = lrs[skip_start:-skip_end] if skip_end > 0", "= data if use_gpu: inputs = inputs.to(device) labels = labels.to(device)", "change it. 
if iteration == 0 or smooth_loss < best_loss:", "NOT -> plot_lr_find # NOT TO BE MODIFIED class ParameterModule(nn.Module):", "smooth_loss < best_loss: best_loss = smooth_loss iteration += 1 if", "# By defualt annealing_exp is used for both lr and", "where to start in lr_find (default=1e-7) end_lr :- The learning", "for group in opt.param_groups: group['lr'] = new_lr ################### TO BE", "new_lr ################### TO BE MODIFIED ################### # You necessarily don't", "inputs, labels = data if use_gpu: inputs = inputs.to(device) labels", "# increasing lr we would use annealing_linear. # You can", "If you want to use Adam, comment these lines for", "beta,0,0 def add_value(self, val:float)->None: \"Add `val` to calculate updated smoothed", "lrs that we test \"\"\" model.train() stop = False flag", "print(f'LR Finder is complete.') return losses, lrs # NOT TO", "opt.param_groups: group['lr'] = sched.start for i in range(epochs): for data", "not isinstance(lr, slice): return lr num_layer = len([nn.Sequential(*flatten_model(model))]) if lr.start:", "class ParameterModule(nn.Module): \"Register a lone parameter 'p' in a module\"", "will call. By default it will use GPU. 
It assumes", "Returns: losses :- list of smoothened version of losses lrs", "# NOT TO BE MODIFIED class Stepper(): \"\"\" Used to", "outputs = model(inputs) loss = loss_fn(outputs, labels) ##################################################### if use_gpu:", "-> plot_lr_find # NOT TO BE MODIFIED class ParameterModule(nn.Module): \"Register", "n_iter) if func is None: self.func = annealing_linear if isinstance(vals,", "data_loader :- torch.utils.data.DataLoader model :- torch.nn.Module loss_fn :- torch.nn.LossFunction opt", "SmoothenValue(): \"Create a smooth moving average for a value (loss,", "or torch.isnan(loss))): flag = True break ##################################################### if iteration%10 ==", "suggestion:bool=False, return_fig:bool=None): \"\"\" It will take the losses and lrs", "in lr_find (default=1e-7) end_lr :- The learning rate at which", "> 4*best_loss or torch.isnan(loss))): flag = True break ##################################################### if", "return losses, lrs # NOT TO BE MODIFIED def plot_lr_find(losses,", "-> children_and_parameters # NOT -> flatten_model # NOT -> lr_range", "annealing_no else: self.func = func self.n = 0 def step(self):", "True (train on GPU) else CPU anneal_func :- The step", "the above annelaing functions, to step from start lr to", "< best_loss: best_loss = smooth_loss iteration += 1 if sched.is_done", "id(p) not in children_p: children.append(ParameterModule(p)) return children # NOT TO", "= len([nn.Sequential(*flatten_model(model))]) if lr.start: mult = lr.stop / lr.start step", "children of `m` and its direct parameters not registered in", "1.0.\" cos_out = np.cos(np.pi * pct) + 1 return end", "fig, ax = plt.subplots(1, 1) ax.plot(lrs, losses) ax.set_ylabel(\"Loss\") ax.set_xlabel(\"Learning Rate\")", "there might not be enough points.\") return print(f\"Min numerical gradient:", "labels) ##################################################### if use_gpu: smoothener.add_value(loss.detach().cpu()) else: 
smoothener.add_value(loss.detach()) smooth_loss = smoothener.smooth", "sched.step() lrs.append(new_lr) for group in opt.param_groups: group['lr'] = new_lr ###################", "def lr_range(model, lr): \"\"\" Build differential learning rate from lr.", "end lr_find (default=10) num_it :- Number of iterations for lr_find", "and lrs returned by lr_find as input. Arguments:- skip_start ->", "skip skip_end lrs from the end suggestion -> If you", "(loss, etc) using `beta`.\" def __init__(self, beta:float): self.beta,self.n,self.mov_avg = beta,0,0", "is complete.') return losses, lrs # NOT TO BE MODIFIED", "lrs # NOT TO BE MODIFIED def plot_lr_find(losses, lrs, skip_start:int=10,", "will call to plot learning_rate vs losses graph. It is", "in opt.param_groups: group['lr'] = new_lr ################### TO BE MODIFIED ###################", "lrs.pop() print(f'LR Finder is complete.') return losses, lrs # NOT", "True then get the fig in the return statement \"\"\"", "slice): return lr num_layer = len([nn.Sequential(*flatten_model(model))]) if lr.start: mult =", "self.beta * self.mov_avg + (1 - self.beta) * val self.smooth", "inputs to your model. inputs, labels = data if use_gpu:", "losses) ax.set_ylabel(\"Loss\") ax.set_xlabel(\"Learning Rate\") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg", "points.\") return print(f\"Min numerical gradient: {lrs[mg]:.2E}\") ax.plot(lrs[mg], losses[mg], markersize=10, marker='o',", ":- The learning rate at which to end lr_find (default=10)", "children # NOT TO BE MODIFIED flatten_model = lambda m:", "def step(self): \"Return next value along annealed schedule\" self.n +=", "a schedule. 
We will create a stepper object and then", "# YES -> lr_find # NOT -> plot_lr_find # NOT", "try: mg = (np.gradient(np.array(losses))).argmin() except: print(\"Failed to compute the gradients,", "It will skip skip_end lrs from the end suggestion ->", "the fig in the return statement \"\"\" lrs = lrs[skip_start:-skip_end]", "c in m.children()],[]) for p in m.parameters(): if id(p) not", "from start, end ('vals') over 'n_iter' iterations on a schedule.", "optimizer learning_rate = start_lr for group in opt.param_groups: group['lr'] =", "tuple) else annealing_no else: self.func = func self.n = 0", ":- The learning rate from where to start in lr_find", "if use_gpu: smoothener.add_value(loss.detach().cpu()) else: smoothener.add_value(loss.detach()) smooth_loss = smoothener.smooth losses.append(smooth_loss) loss.backward()", "= sum([[id(p) for p in c.parameters()] for c in m.children()],[])", "def do_annealing_poly(start, end, pct:float, degree): return end + (start-end) *", "GPU. It assumes your model is already on GPU if", "use_gpu :- True (train on GPU) else CPU anneal_func :-", "MODIFIED ################### # For AdamW. If you want to use", "val self.smooth = self.mov_avg / (1 - self.beta ** self.n)", "`beta`.\" def __init__(self, beta:float): self.beta,self.n,self.mov_avg = beta,0,0 def add_value(self, val:float)->None:", "Rate\") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg = (np.gradient(np.array(losses))).argmin() except:", "flatten_model # NOT -> lr_range # NOT -> scheduling functions", "NOT TO BE MODIFIED # To be used to flatten_model", ":- True (train on GPU) else CPU anneal_func :- The", "import numpy as np import matplotlib.pyplot as plt # NOT", "would use annealing_linear. 
# You can add your own custom", "losses[skip_start:] losses = [x.item() for x in losses] fig, ax", "will take the losses and lrs returned by lr_find as", "TO BE MODIFIED # These are the functions that would", "num_it, anneal_func) smoothener = SmoothenValue(smooth_beta) epochs = int(np.ceil(num_it/len(data_loader))) # save", "return lr num_layer = len([nn.Sequential(*flatten_model(model))]) if lr.start: mult = lr.stop", "group in opt.param_groups: for param in group['params']: param.data = param.data.add(-wd", "state dict model.load_state_dict(model_state) opt.load_state_dict(opt_state) lrs.pop() print(f'LR Finder is complete.') return", "= losses[skip_start:-skip_end] if skip_end > 0 else losses[skip_start:] losses =", "You can add your own custom function, for producing lr.", "-> flatten_model # NOT -> lr_range # NOT -> scheduling", "(default=True) smooth_beta :- The beta value to smoothen the running", "= np.cos(np.pi * pct) + 1 return end + (start-end)/2", "labels.to(device) outputs = model(inputs) loss = loss_fn(outputs, labels) ##################################################### if", "NOT -> children_and_parameters # NOT -> flatten_model # NOT ->", "of smoothened version of losses lrs :- list of all", "smoothener.smooth losses.append(smooth_loss) loss.backward() ################### TO BE MODIFIED ################### # For", "m: sum(map(flatten_model,children_and_parameters(m)),[]) if len(list(m.children())) else [m] # NOT TO BE", "(default GPU) Returns: losses :- list of smoothened version of", "False best_loss = 0. iteration = 0 losses = []", "return start * (end/start) ** pct def annealing_cos(start, end, pct:float):", "group['params']: param.data = param.data.add(-wd * group['lr'], param.data) ##################################################### opt.step() #", "def is_done(self)->bool: \"Return 'True' if schedule completed\" return self.n >=", "= np.array(start_lr) if isinstance(start_lr, (tuple, list)) else start_lr end_lr =", "model. 
inputs, labels = data if use_gpu: inputs = inputs.to(device)", "print(f\"Min numerical gradient: {lrs[mg]:.2E}\") ax.plot(lrs[mg], losses[mg], markersize=10, marker='o', color='red') if", "MODIFIED def plot_lr_find(losses, lrs, skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None): \"\"\" It", "lrs = lrs[skip_start:-skip_end] if skip_end > 0 else lrs[skip_start:] losses", "in losses] fig, ax = plt.subplots(1, 1) ax.plot(lrs, losses) ax.set_ylabel(\"Loss\")", "The learning rate from where to start in lr_find (default=1e-7)", "def annealing_no(start, end, pct:float): \"No annealing, always return `start`.\" return", "add_value(self, val:float)->None: \"Add `val` to calculate updated smoothed value.\" self.n", "* (1-pct)**degree # NOT TO BE MODIFIED class Stepper(): \"\"\"", "flatten_model = lambda m: sum(map(flatten_model,children_and_parameters(m)),[]) if len(list(m.children())) else [m] #", ":- Number of iterations for lr_find (default=100) stop_div :- If", "lr_range(model, end_lr) end_lr = np.array(end_lr) if isinstance(end_lr, (tuple, list)) else", "for group in opt.param_groups: for param in group['params']: param.data =", "skip skip_start lrs from the start skip_end -> It will", "to end lr. \"\"\" def __init__(self, vals, n_iter:int, func=None): self.start,", "1 return self.func(self.start, self.end, self.n/self.n_iter) @property def is_done(self)->bool: \"Return 'True'", "p:nn.Parameter): super().__init__() self.val = p def forward(self, x): return x", "sum(map(flatten_model,children_and_parameters(m)),[]) if len(list(m.children())) else [m] # NOT TO BE MODIFIED", "self.func = func self.n = 0 def step(self): \"Return next", "you use use_gpu. Arguments:- data_loader :- torch.utils.data.DataLoader model :- torch.nn.Module", "torch.nn.Module lr :- float or slice Returns: Depending upon lr", "weight decay (default=0). 
start_lr :- The learning rate from where", "(train on GPU) else CPU anneal_func :- The step function", "NOT TO BE MODIFIED class ParameterModule(nn.Module): \"Register a lone parameter", "to start in lr_find (default=1e-7) end_lr :- The learning rate", "__init__(self, beta:float): self.beta,self.n,self.mov_avg = beta,0,0 def add_value(self, val:float)->None: \"Add `val`", "the Arguments: model :- torch.nn.Module lr :- float or slice", "to change it. if iteration == 0 or smooth_loss <", "annealing_no(start, end, pct:float): \"No annealing, always return `start`.\" return start", "`start`.\" return start def annealing_linear(start, end, pct:float): \"Linearly anneal from", "IN SOME CASES def lr_find(data_loader, model, loss_fn, opt, wd:int=0, start_lr:float=1e-7,", "NOT TO BE MODIFIED # These are the functions that", "\"Add `val` to calculate updated smoothed value.\" self.n += 1", "It will take the losses and lrs returned by lr_find", "use GPU. It assumes your model is already on GPU", "################### TO BE MODIFIED ################### # Depending on your model,", "to end lr_find (default=10) num_it :- Number of iterations for", "to calculate updated smoothed value.\" self.n += 1 self.mov_avg =", "lr_find(data_loader, model, loss_fn, opt, wd:int=0, start_lr:float=1e-7, end_lr:float=10, num_it:int=100, stop_div:bool=True, smooth_beta:float=0.98,", "= inputs.to(device) labels = labels.to(device) outputs = model(inputs) loss =", "end_lr sched = Stepper((start_lr, end_lr), num_it, anneal_func) smoothener = SmoothenValue(smooth_beta)", "* group['lr'], param.data) ##################################################### opt.step() # Change lr new_lr =", "value along annealed schedule\" self.n += 1 return self.func(self.start, self.end,", "function that you will call to plot learning_rate vs losses", "don't want to change it. 
But in cases # when", "diverges, then stop early (default=True) smooth_beta :- The beta value", "in opt.param_groups: for param in group['params']: param.data = param.data.add(-wd *", "skip_end lrs from the end suggestion -> If you want", "return_fig -> True then get the fig in the return", "in m.children()],[]) for p in m.parameters(): if id(p) not in", "then use one of the above annelaing functions, to step", "rate from lr. It will give you the Arguments: model", "your own custom function, for producing lr. # By defualt", "returned by lr_find as input. Arguments:- skip_start -> It will", "moving average for a value (loss, etc) using `beta`.\" def", "= mult**(1/(num_layer-1)) res = np.array([lr.start*(step**i) for i in range(num_layer)]) else:", "lambda m: sum(map(flatten_model,children_and_parameters(m)),[]) if len(list(m.children())) else [m] # NOT TO", "gradient changes most return_fig -> True then get the fig", "data pipeline and how you give inputs to your model.", "AdamW. If you want to use Adam, comment these lines", "start_lr :- The learning rate from where to start in", "BE MODIFIED ################### # For AdamW. If you want to", "ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg = (np.gradient(np.array(losses))).argmin() except: print(\"Failed to", "annealing_linear. # You can add your own custom function, for", "sum([[id(p) for p in c.parameters()] for c in m.children()],[]) for", "and momentum def annealing_no(start, end, pct:float): \"No annealing, always return", "lrs, skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None): \"\"\" It will take the", "change it. 
But in cases # when you are maximizing", "def __init__(self, p:nn.Parameter): super().__init__() self.val = p def forward(self, x):", "x # NOT TO BE MODIFIED # To be used", "'True' if schedule completed\" return self.n >= self.n_iter # NOT", "function (default=0.98) use_gpu :- True (train on GPU) else CPU", "[] lrs = [] lrs.append(start_lr) start_lr = lr_range(model, start_lr) start_lr", "end_lr = lr_range(model, end_lr) end_lr = np.array(end_lr) if isinstance(end_lr, (tuple,", "end suggestion -> If you want to see the point", "if lr.start: mult = lr.stop / lr.start step = mult**(1/(num_layer-1))", "decay (default=0). start_lr :- The learning rate from where to", "we would use annealing_linear. # You can add your own", "else: smoothener.add_value(loss.detach()) smooth_loss = smoothener.smooth losses.append(smooth_loss) loss.backward() ################### TO BE", "self.n) # TO BE MODIFIED IN SOME CASES def lr_find(data_loader,", "lr. It will give you the Arguments: model :- torch.nn.Module", "smoothen the running avergae of the loss function (default=0.98) use_gpu", "skip_end -> It will skip skip_end lrs from the end", "isinstance(end_lr, (tuple, list)) else end_lr sched = Stepper((start_lr, end_lr), num_it,", "not registered in modules.\" children = list(m.children()) children_p = sum([[id(p)", "lr. # By defualt annealing_exp is used for both lr", "fig in the return statement \"\"\" lrs = lrs[skip_start:-skip_end] if", "= beta,0,0 def add_value(self, val:float)->None: \"Add `val` to calculate updated", "lrs.append(start_lr) start_lr = lr_range(model, start_lr) start_lr = np.array(start_lr) if isinstance(start_lr,", "opt.load_state_dict(opt_state) lrs.pop() print(f'LR Finder is complete.') return losses, lrs #", "a module\" def __init__(self, p:nn.Parameter): super().__init__() self.val = p def", "MODIFIED def lr_range(model, lr): \"\"\" Build differential learning rate from", "differential learning rate from lr. 
It will give you the", "else: self.func = func self.n = 0 def step(self): \"Return", "= func self.n = 0 def step(self): \"Return next value", "from `start` to `end` as pct goes from 0.0 to", "num_it:int=100, stop_div:bool=True, smooth_beta:float=0.98, use_gpu:bool=True, device=torch.device('cuda'), anneal_func=annealing_exp): \"\"\" The main function", "of iterations for lr_find (default=100) stop_div :- If the loss", "np import matplotlib.pyplot as plt # NOT -> ParameterModule #", "test \"\"\" model.train() stop = False flag = False best_loss", "lr_find # NOT -> plot_lr_find # NOT TO BE MODIFIED", "float or slice Returns: Depending upon lr \"\"\" if not", "are the functions that would give us the values of", "= smoothener.smooth losses.append(smooth_loss) loss.backward() ################### TO BE MODIFIED ################### #", "enough points.\") return print(f\"Min numerical gradient: {lrs[mg]:.2E}\") ax.plot(lrs[mg], losses[mg], markersize=10,", "value (loss, etc) using `beta`.\" def __init__(self, beta:float): self.beta,self.n,self.mov_avg =", "The main function that you will call to plot learning_rate", "exp) device :- Torch device to use for training model", "smoothener = SmoothenValue(smooth_beta) epochs = int(np.ceil(num_it/len(data_loader))) # save model_dict model_state", "pipeline and how you give inputs to your model. 
inputs,", "the end suggestion -> If you want to see the", "1 return end + (start-end)/2 * cos_out def do_annealing_poly(start, end,", "stop early (default=True) smooth_beta :- The beta value to smoothen", "class SmoothenValue(): \"Create a smooth moving average for a value", "self.mov_avg / (1 - self.beta ** self.n) # TO BE", "lr_find (default=1e-7) end_lr :- The learning rate at which to", "# Load state dict model.load_state_dict(model_state) opt.load_state_dict(opt_state) lrs.pop() print(f'LR Finder is", "[x.item() for x in losses] fig, ax = plt.subplots(1, 1)", "for param in group['params']: param.data = param.data.add(-wd * group['lr'], param.data)", "you will call. By default it will use GPU. It", "is used for both lr and momentum def annealing_no(start, end,", "and how you give inputs to your model. inputs, labels", "p in m.parameters(): if id(p) not in children_p: children.append(ParameterModule(p)) return", "parameter 'p' in a module\" def __init__(self, p:nn.Parameter): super().__init__() self.val", "flatten_model def children_and_parameters(m:nn.Module): \"Return the children of `m` and its", "create a stepper object and then use one of the", "necessarily don't want to change it. But in cases #", "= int(np.ceil(num_it/len(data_loader))) # save model_dict model_state = model.state_dict() opt_state =", "to use for training model (default GPU) Returns: losses :-", "loss_fn, opt, wd:int=0, start_lr:float=1e-7, end_lr:float=10, num_it:int=100, stop_div:bool=True, smooth_beta:float=0.98, use_gpu:bool=True, device=torch.device('cuda'),", "= 0. iteration = 0 losses = [] lrs =", "for data in data_loader: opt.zero_grad() ################### TO BE MODIFIED ###################", "from lr. 
It will give you the Arguments: model :-", "MODIFIED ################### # Depending on your model, you will have", "def __init__(self, vals, n_iter:int, func=None): self.start, self.end = (vals[0], vals[1])", "else losses[skip_start:] losses = [x.item() for x in losses] fig,", "import torch import torch.nn as nn import numpy as np", "your model, you will have to modify your # data", "If the loss diverges, then stop early (default=True) smooth_beta :-", "# NOT TO BE MODIFIED class ParameterModule(nn.Module): \"Register a lone", "(end-start) def annealing_exp(start, end, pct:float): \"Exponentially anneal from `start` to", "BE MODIFIED class SmoothenValue(): \"Create a smooth moving average for", "# data pipeline and how you give inputs to your", "(default=1e-7) end_lr :- The learning rate at which to end", "= lr_range(model, end_lr) end_lr = np.array(end_lr) if isinstance(end_lr, (tuple, list))", "might not be enough points.\") return print(f\"Min numerical gradient: {lrs[mg]:.2E}\")", "0.0 to 1.0.\" return start + pct * (end-start) def", "a lone parameter 'p' in a module\" def __init__(self, p:nn.Parameter):", "default it will use GPU. 
It assumes your model is", "the functions that would give us the values of lr.", "main function that you will call to plot learning_rate vs", "will skip skip_end lrs from the end suggestion -> If", "(stop_div and (smooth_loss > 4*best_loss or torch.isnan(loss))): flag = True", "self.n_iter # NOT TO BE MODIFIED class SmoothenValue(): \"Create a", "break ##################################################### if iteration%10 == 0: print(f'Iteration: {iteration}') if flag:", "or smooth_loss < best_loss: best_loss = smooth_loss iteration += 1", "ax.plot(lrs, losses) ax.set_ylabel(\"Loss\") ax.set_xlabel(\"Learning Rate\") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try:", ":- list of smoothened version of losses lrs :- list", "list of smoothened version of losses lrs :- list of", "BE MODIFIED # To be used to flatten_model def children_and_parameters(m:nn.Module):", "= (vals[0], vals[1]) if isinstance(vals, tuple) else (vals,0) self.n_iter =", "which to end lr_find (default=10) num_it :- Number of iterations", "list)) else start_lr end_lr = lr_range(model, end_lr) end_lr = np.array(end_lr)", "# when you are maximizing the loss, then you will", "def annealing_exp(start, end, pct:float): \"Exponentially anneal from `start` to `end`", "pct:float, degree): return end + (start-end) * (1-pct)**degree # NOT", "For AdamW. 
If you want to use Adam, comment these", "It is the only function from lr_find.py that you will", "0 or smooth_loss < best_loss: best_loss = smooth_loss iteration +=", "len(list(m.children())) else [m] # NOT TO BE MODIFIED def lr_range(model,", "step(self): \"Return next value along annealed schedule\" self.n += 1", "the loss, then you will have # to change it.", "MODIFIED class ParameterModule(nn.Module): \"Register a lone parameter 'p' in a", "if schedule completed\" return self.n >= self.n_iter # NOT TO", "return print(f\"Min numerical gradient: {lrs[mg]:.2E}\") ax.plot(lrs[mg], losses[mg], markersize=10, marker='o', color='red')", "annealing_linear if isinstance(vals, tuple) else annealing_no else: self.func = func", "use Adam, comment these lines for group in opt.param_groups: for", "# You necessarily don't want to change it. But in", "for x in losses] fig, ax = plt.subplots(1, 1) ax.plot(lrs,", "= 0 def step(self): \"Return next value along annealed schedule\"", "the loss diverges, then stop early (default=True) smooth_beta :- The", "return x # NOT TO BE MODIFIED # To be", "len([nn.Sequential(*flatten_model(model))]) if lr.start: mult = lr.stop / lr.start step =", ":- list of all lrs that we test \"\"\" model.train()", "MODIFIED IN SOME CASES def lr_find(data_loader, model, loss_fn, opt, wd:int=0,", "return `start`.\" return start def annealing_linear(start, end, pct:float): \"Linearly anneal", "if you use use_gpu. Arguments:- data_loader :- torch.utils.data.DataLoader model :-", "pct:float): \"No annealing, always return `start`.\" return start def annealing_linear(start,", "in group['params']: param.data = param.data.add(-wd * group['lr'], param.data) ##################################################### opt.step()", "increasing lr we would use annealing_linear. # You can add", "in m.parameters(): if id(p) not in children_p: children.append(ParameterModule(p)) return children", "By default it will use GPU. 
It assumes your model", "[] lrs.append(start_lr) start_lr = lr_range(model, start_lr) start_lr = np.array(start_lr) if", "have # to change it. if iteration == 0 or", "model is already on GPU if you use use_gpu. Arguments:-", "self.end, self.n/self.n_iter) @property def is_done(self)->bool: \"Return 'True' if schedule completed\"", "# to change it. if iteration == 0 or smooth_loss", "to flatten_model def children_and_parameters(m:nn.Module): \"Return the children of `m` and", "most return_fig -> True then get the fig in the", "the running avergae of the loss function (default=0.98) use_gpu :-", "you will call to plot learning_rate vs losses graph. It", "return self.n >= self.n_iter # NOT TO BE MODIFIED class", "forward(self, x): return x # NOT TO BE MODIFIED #", "one of the above annelaing functions, to step from start", "super().__init__() self.val = p def forward(self, x): return x #", "/ (1 - self.beta ** self.n) # TO BE MODIFIED", "these lines for group in opt.param_groups: for param in group['params']:", "def lr_find(data_loader, model, loss_fn, opt, wd:int=0, start_lr:float=1e-7, end_lr:float=10, num_it:int=100, stop_div:bool=True,", "stop_div:bool=True, smooth_beta:float=0.98, use_gpu:bool=True, device=torch.device('cuda'), anneal_func=annealing_exp): \"\"\" The main function that", "= list(m.children()) children_p = sum([[id(p) for p in c.parameters()] for", "lr num_layer = len([nn.Sequential(*flatten_model(model))]) if lr.start: mult = lr.stop /", "start_lr for group in opt.param_groups: group['lr'] = sched.start for i", "(end/start) ** pct def annealing_cos(start, end, pct:float): \"Cosine anneal from", "for group in opt.param_groups: group['lr'] = sched.start for i in", "0 else lrs[skip_start:] losses = losses[skip_start:-skip_end] if skip_end > 0", "torch.optim.Optimizer wd :- weight decay (default=0). 
start_lr :- The learning", "nn import numpy as np import matplotlib.pyplot as plt #", "BE MODIFIED flatten_model = lambda m: sum(map(flatten_model,children_and_parameters(m)),[]) if len(list(m.children())) else", "use_gpu: smoothener.add_value(loss.detach().cpu()) else: smoothener.add_value(loss.detach()) smooth_loss = smoothener.smooth losses.append(smooth_loss) loss.backward() ###################", "= False flag = False best_loss = 0. iteration =", "param.data = param.data.add(-wd * group['lr'], param.data) ##################################################### opt.step() # Change", "'p' in a module\" def __init__(self, p:nn.Parameter): super().__init__() self.val =", "MODIFIED # These are the functions that would give us", "end, pct:float): \"Cosine anneal from `start` to `end` as pct", "vals, n_iter:int, func=None): self.start, self.end = (vals[0], vals[1]) if isinstance(vals,", "* self.mov_avg + (1 - self.beta) * val self.smooth =", "self.beta,self.n,self.mov_avg = beta,0,0 def add_value(self, val:float)->None: \"Add `val` to calculate", "The beta value to smoothen the running avergae of the", "p in c.parameters()] for c in m.children()],[]) for p in", "__init__(self, vals, n_iter:int, func=None): self.start, self.end = (vals[0], vals[1]) if", "the loss function (default=0.98) use_gpu :- True (train on GPU)", "torch.nn as nn import numpy as np import matplotlib.pyplot as", "vs losses graph. It is the only function from lr_find.py", "start lr to end lr. \"\"\" def __init__(self, vals, n_iter:int,", "start_lr = lr_range(model, start_lr) start_lr = np.array(start_lr) if isinstance(start_lr, (tuple,", "* (end/start) ** pct def annealing_cos(start, end, pct:float): \"Cosine anneal", "= lr_range(model, start_lr) start_lr = np.array(start_lr) if isinstance(start_lr, (tuple, list))", "1 if sched.is_done or (stop_div and (smooth_loss > 4*best_loss or", ":- float or slice Returns: Depending upon lr \"\"\" if", "losses graph. 
It is the only function from lr_find.py that", "loss = loss_fn(outputs, labels) ##################################################### if use_gpu: smoothener.add_value(loss.detach().cpu()) else: smoothener.add_value(loss.detach())", "BE MODIFIED def lr_range(model, lr): \"\"\" Build differential learning rate", "opt_state = opt.state_dict() # Set optimizer learning_rate = start_lr for", "pct goes from 0.0 to 1.0.\" return start * (end/start)", "the gradients, there might not be enough points.\") return print(f\"Min", "return children # NOT TO BE MODIFIED flatten_model = lambda", "from where to start in lr_find (default=1e-7) end_lr :- The", "stop = False flag = False best_loss = 0. iteration", "\"\"\" It will take the losses and lrs returned by", "to compute the gradients, there might not be enough points.\")", "functions that would give us the values of lr. Liks", "# NOT -> ParameterModule # NOT -> children_and_parameters # NOT", "func is None: self.func = annealing_linear if isinstance(vals, tuple) else", "0 losses = [] lrs = [] lrs.append(start_lr) start_lr =", "opt.param_groups: for param in group['params']: param.data = param.data.add(-wd * group['lr'],", "do_annealing_poly(start, end, pct:float, degree): return end + (start-end) * (1-pct)**degree", "as plt # NOT -> ParameterModule # NOT -> children_and_parameters", "best_loss = 0. 
iteration = 0 losses = [] lrs", "else (vals,0) self.n_iter = max(1, n_iter) if func is None:", "learning rate from where to start in lr_find (default=1e-7) end_lr", "to `end` as pct goes from 0.0 to 1.0.\" return", "goes from 0.0 to 1.0.\" cos_out = np.cos(np.pi * pct)", "NOT TO BE MODIFIED flatten_model = lambda m: sum(map(flatten_model,children_and_parameters(m)),[]) if", "+= 1 self.mov_avg = self.beta * self.mov_avg + (1 -", "in modules.\" children = list(m.children()) children_p = sum([[id(p) for p", "MODIFIED ################### # You necessarily don't want to change it.", "By defualt annealing_exp is used for both lr and momentum", "scheduling functions # NOT -> SmoothenValue # YES -> lr_find", "annealing_linear(start, end, pct:float): \"Linearly anneal from `start` to `end` as", "start * (end/start) ** pct def annealing_cos(start, end, pct:float): \"Cosine", "annealing_cos(start, end, pct:float): \"Cosine anneal from `start` to `end` as", ">= self.n_iter # NOT TO BE MODIFIED class SmoothenValue(): \"Create", "##################################################### if use_gpu: smoothener.add_value(loss.detach().cpu()) else: smoothener.add_value(loss.detach()) smooth_loss = smoothener.smooth losses.append(smooth_loss)", "and (smooth_loss > 4*best_loss or torch.isnan(loss))): flag = True break", "model.state_dict() opt_state = opt.state_dict() # Set optimizer learning_rate = start_lr", "can add your own custom function, for producing lr. 
#", "torch.isnan(loss))): flag = True break ##################################################### if iteration%10 == 0:", "cos_out def do_annealing_poly(start, end, pct:float, degree): return end + (start-end)", "c.parameters()] for c in m.children()],[]) for p in m.parameters(): if", "rate at which to end lr_find (default=10) num_it :- Number", "to 1.0.\" return start + pct * (end-start) def annealing_exp(start,", "and its direct parameters not registered in modules.\" children =", "you the Arguments: model :- torch.nn.Module lr :- float or", "lr :- float or slice Returns: Depending upon lr \"\"\"", "= [] lrs.append(start_lr) start_lr = lr_range(model, start_lr) start_lr = np.array(start_lr)", "SmoothenValue(smooth_beta) epochs = int(np.ceil(num_it/len(data_loader))) # save model_dict model_state = model.state_dict()", "torch.utils.data.DataLoader model :- torch.nn.Module loss_fn :- torch.nn.LossFunction opt :- torch.optim.Optimizer", "class Stepper(): \"\"\" Used to step from start, end ('vals')", "smoothener.add_value(loss.detach().cpu()) else: smoothener.add_value(loss.detach()) smooth_loss = smoothener.smooth losses.append(smooth_loss) loss.backward() ################### TO", "new_lr = sched.step() lrs.append(new_lr) for group in opt.param_groups: group['lr'] =", "list(m.children()) children_p = sum([[id(p) for p in c.parameters()] for c", "GPU) else CPU anneal_func :- The step function you want", "SmoothenValue # YES -> lr_find # NOT -> plot_lr_find #", "= sched.step() lrs.append(new_lr) for group in opt.param_groups: group['lr'] = new_lr", "complete.') return losses, lrs # NOT TO BE MODIFIED def", "when you are maximizing the loss, then you will have", "def __init__(self, beta:float): self.beta,self.n,self.mov_avg = beta,0,0 def add_value(self, val:float)->None: \"Add", "1.0.\" return start + pct * (end-start) def annealing_exp(start, end,", "\"Return next value along annealed schedule\" self.n += 1 return", "\"Return the children of `m` and its direct 
parameters not", "to `end` as pct goes from 0.0 to 1.0.\" cos_out", "(start-end)/2 * cos_out def do_annealing_poly(start, end, pct:float, degree): return end", "is_done(self)->bool: \"Return 'True' if schedule completed\" return self.n >= self.n_iter", "a smooth moving average for a value (loss, etc) using", ":- If the loss diverges, then stop early (default=True) smooth_beta", "your model is already on GPU if you use use_gpu.", ":- Torch device to use for training model (default GPU)", "= False best_loss = 0. iteration = 0 losses =", "[m] # NOT TO BE MODIFIED def lr_range(model, lr): \"\"\"", "if iteration == 0 or smooth_loss < best_loss: best_loss =", "that would give us the values of lr. Liks for", "Depending upon lr \"\"\" if not isinstance(lr, slice): return lr", "Returns: Depending upon lr \"\"\" if not isinstance(lr, slice): return", "step from start, end ('vals') over 'n_iter' iterations on a", "end, pct:float): \"Linearly anneal from `start` to `end` as pct", "(1 - self.beta) * val self.smooth = self.mov_avg / (1", "range(num_layer)]) else: res = [lr.stop/10.]*(num_layer-1) + [lr.stop] return np.array(res) #", "goes from 0.0 to 1.0.\" return start * (end/start) **", "model_dict model_state = model.state_dict() opt_state = opt.state_dict() # Set optimizer", "goes from 0.0 to 1.0.\" return start + pct *", "model, you will have to modify your # data pipeline", "calculate updated smoothed value.\" self.n += 1 self.mov_avg = self.beta", "param.data) ##################################################### opt.step() # Change lr new_lr = sched.step() lrs.append(new_lr)", "def children_and_parameters(m:nn.Module): \"Return the children of `m` and its direct", "GPU) Returns: losses :- list of smoothened version of losses", "suggestion -> If you want to see the point where", "to see the point where the gradient changes most return_fig", "labels = data if use_gpu: inputs = inputs.to(device) labels =", ":- weight decay (default=0). 
start_lr :- The learning rate from", "get the fig in the return statement \"\"\" lrs =", "losses[skip_start:-skip_end] if skip_end > 0 else losses[skip_start:] losses = [x.item()", "return start + pct * (end-start) def annealing_exp(start, end, pct:float):", "plt # NOT -> ParameterModule # NOT -> children_and_parameters #", "except: print(\"Failed to compute the gradients, there might not be", "from the end suggestion -> If you want to see", "TO BE MODIFIED ################### # Depending on your model, you", "vals[1]) if isinstance(vals, tuple) else (vals,0) self.n_iter = max(1, n_iter)", "degree): return end + (start-end) * (1-pct)**degree # NOT TO", "next value along annealed schedule\" self.n += 1 return self.func(self.start,", "loss_fn :- torch.nn.LossFunction opt :- torch.optim.Optimizer wd :- weight decay", "that we test \"\"\" model.train() stop = False flag =", "to modify your # data pipeline and how you give", "TO BE MODIFIED # To be used to flatten_model def", "lrs.append(new_lr) for group in opt.param_groups: group['lr'] = new_lr ################### TO", "print(\"Failed to compute the gradients, there might not be enough", "losses.append(smooth_loss) loss.backward() ################### TO BE MODIFIED ################### # For AdamW.", "m.parameters(): if id(p) not in children_p: children.append(ParameterModule(p)) return children #", "learning rate from lr. 
It will give you the Arguments:", "+= 1 if sched.is_done or (stop_div and (smooth_loss > 4*best_loss", "We will create a stepper object and then use one", "\"Create a smooth moving average for a value (loss, etc)", "(tuple, list)) else start_lr end_lr = lr_range(model, end_lr) end_lr =", "range(epochs): for data in data_loader: opt.zero_grad() ################### TO BE MODIFIED", "# NOT TO BE MODIFIED # To be used to", "func self.n = 0 def step(self): \"Return next value along", "start in lr_find (default=1e-7) end_lr :- The learning rate at", "# NOT -> flatten_model # NOT -> lr_range # NOT", "# These are the functions that would give us the", "device to use for training model (default GPU) Returns: losses", "flag = True break ##################################################### if iteration%10 == 0: print(f'Iteration:", "It will give you the Arguments: model :- torch.nn.Module lr", "children_p = sum([[id(p) for p in c.parameters()] for c in", "in range(num_layer)]) else: res = [lr.stop/10.]*(num_layer-1) + [lr.stop] return np.array(res)", "if isinstance(vals, tuple) else annealing_no else: self.func = func self.n", "ax = plt.subplots(1, 1) ax.plot(lrs, losses) ax.set_ylabel(\"Loss\") ax.set_xlabel(\"Learning Rate\") ax.set_xscale('log')", "self.n += 1 self.mov_avg = self.beta * self.mov_avg + (1", "for c in m.children()],[]) for p in m.parameters(): if id(p)", "def annealing_cos(start, end, pct:float): \"Cosine anneal from `start` to `end`", "\"Return 'True' if schedule completed\" return self.n >= self.n_iter #", "device :- Torch device to use for training model (default", "will use GPU. It assumes your model is already on", "# NOT -> children_and_parameters # NOT -> flatten_model # NOT", "only function from lr_find.py that you will call. By default", "\"\"\" model.train() stop = False flag = False best_loss =", "best_loss = smooth_loss iteration += 1 if sched.is_done or (stop_div", "lr_find.py that you will call. 
By default it will use", "TO BE MODIFIED flatten_model = lambda m: sum(map(flatten_model,children_and_parameters(m)),[]) if len(list(m.children()))", "\"\"\" The main function that you will call to plot", "* val self.smooth = self.mov_avg / (1 - self.beta **", "to plot learning_rate vs losses graph. It is the only", "as pct goes from 0.0 to 1.0.\" cos_out = np.cos(np.pi", "is None: self.func = annealing_linear if isinstance(vals, tuple) else annealing_no", "CPU anneal_func :- The step function you want to use", "labels = labels.to(device) outputs = model(inputs) loss = loss_fn(outputs, labels)", "1) ax.plot(lrs, losses) ax.set_ylabel(\"Loss\") ax.set_xlabel(\"Learning Rate\") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion:", "direct parameters not registered in modules.\" children = list(m.children()) children_p", "for lr_find (default=100) stop_div :- If the loss diverges, then", "lone parameter 'p' in a module\" def __init__(self, p:nn.Parameter): super().__init__()", "lr and momentum def annealing_no(start, end, pct:float): \"No annealing, always", "cos_out = np.cos(np.pi * pct) + 1 return end +", "** self.n) # TO BE MODIFIED IN SOME CASES def", "0.0 to 1.0.\" return start * (end/start) ** pct def", "call to plot learning_rate vs losses graph. 
It is the", "isinstance(vals, tuple) else annealing_no else: self.func = func self.n =", "= model.state_dict() opt_state = opt.state_dict() # Set optimizer learning_rate =", "have to modify your # data pipeline and how you", "lrs from the start skip_end -> It will skip skip_end", "# save model_dict model_state = model.state_dict() opt_state = opt.state_dict() #", "pct) + 1 return end + (start-end)/2 * cos_out def", "tuple) else (vals,0) self.n_iter = max(1, n_iter) if func is", "along annealed schedule\" self.n += 1 return self.func(self.start, self.end, self.n/self.n_iter)", "= loss_fn(outputs, labels) ##################################################### if use_gpu: smoothener.add_value(loss.detach().cpu()) else: smoothener.add_value(loss.detach()) smooth_loss", "use (default exp) device :- Torch device to use for", "iteration%10 == 0: print(f'Iteration: {iteration}') if flag: break # Load", "np.array(start_lr) if isinstance(start_lr, (tuple, list)) else start_lr end_lr = lr_range(model,", "MODIFIED flatten_model = lambda m: sum(map(flatten_model,children_and_parameters(m)),[]) if len(list(m.children())) else [m]", "object and then use one of the above annelaing functions,", "# NOT -> scheduling functions # NOT -> SmoothenValue #", "skip_end > 0 else lrs[skip_start:] losses = losses[skip_start:-skip_end] if skip_end", "will give you the Arguments: model :- torch.nn.Module lr :-", "= new_lr ################### TO BE MODIFIED ################### # You necessarily", "start_lr = np.array(start_lr) if isinstance(start_lr, (tuple, list)) else start_lr end_lr", "= opt.state_dict() # Set optimizer learning_rate = start_lr for group", "give inputs to your model. 
inputs, labels = data if", "defualt annealing_exp is used for both lr and momentum def", "end_lr = np.array(end_lr) if isinstance(end_lr, (tuple, list)) else end_lr sched", "if func is None: self.func = annealing_linear if isinstance(vals, tuple)", "pct * (end-start) def annealing_exp(start, end, pct:float): \"Exponentially anneal from", "for i in range(num_layer)]) else: res = [lr.stop/10.]*(num_layer-1) + [lr.stop]", "losses, lrs # NOT TO BE MODIFIED def plot_lr_find(losses, lrs,", "anneal_func :- The step function you want to use (default", "- self.beta ** self.n) # TO BE MODIFIED IN SOME", "ax.set_xlabel(\"Learning Rate\") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg = (np.gradient(np.array(losses))).argmin()", "schedule. We will create a stepper object and then use", "p def forward(self, x): return x # NOT TO BE", "as pct goes from 0.0 to 1.0.\" return start +", "smooth_beta:float=0.98, use_gpu:bool=True, device=torch.device('cuda'), anneal_func=annealing_exp): \"\"\" The main function that you", "m.children()],[]) for p in m.parameters(): if id(p) not in children_p:", "used for both lr and momentum def annealing_no(start, end, pct:float):", "MODIFIED class SmoothenValue(): \"Create a smooth moving average for a", "TO BE MODIFIED IN SOME CASES def lr_find(data_loader, model, loss_fn,", "that you will call. By default it will use GPU.", "already on GPU if you use use_gpu. 
Arguments:- data_loader :-", "you want to use (default exp) device :- Torch device", "of the above annelaing functions, to step from start lr", "be used to flatten_model def children_and_parameters(m:nn.Module): \"Return the children of", "- self.beta) * val self.smooth = self.mov_avg / (1 -", "modify your # data pipeline and how you give inputs", "(np.gradient(np.array(losses))).argmin() except: print(\"Failed to compute the gradients, there might not", "res = np.array([lr.start*(step**i) for i in range(num_layer)]) else: res =", "isinstance(vals, tuple) else (vals,0) self.n_iter = max(1, n_iter) if func", "isinstance(start_lr, (tuple, list)) else start_lr end_lr = lr_range(model, end_lr) end_lr", "where the gradient changes most return_fig -> True then get", "dict model.load_state_dict(model_state) opt.load_state_dict(opt_state) lrs.pop() print(f'LR Finder is complete.') return losses,", "\"\"\" lrs = lrs[skip_start:-skip_end] if skip_end > 0 else lrs[skip_start:]", "compute the gradients, there might not be enough points.\") return", "\"Register a lone parameter 'p' in a module\" def __init__(self,", "1 self.mov_avg = self.beta * self.mov_avg + (1 - self.beta)", "`end` as pct goes from 0.0 to 1.0.\" return start", "lrs :- list of all lrs that we test \"\"\"", "+ [lr.stop] return np.array(res) # NOT TO BE MODIFIED #", "slice Returns: Depending upon lr \"\"\" if not isinstance(lr, slice):", "+= 1 return self.func(self.start, self.end, self.n/self.n_iter) @property def is_done(self)->bool: \"Return", "function, for producing lr. 
# By defualt annealing_exp is used", "== 0: print(f'Iteration: {iteration}') if flag: break # Load state", "anneal_func) smoothener = SmoothenValue(smooth_beta) epochs = int(np.ceil(num_it/len(data_loader))) # save model_dict", "0: print(f'Iteration: {iteration}') if flag: break # Load state dict", "* cos_out def do_annealing_poly(start, end, pct:float, degree): return end +", "end_lr :- The learning rate at which to end lr_find", "the values of lr. Liks for linearly # increasing lr", "self.n_iter = max(1, n_iter) if func is None: self.func =", "# NOT TO BE MODIFIED # These are the functions", "= annealing_linear if isinstance(vals, tuple) else annealing_no else: self.func =", "max(1, n_iter) if func is None: self.func = annealing_linear if", "smoothener.add_value(loss.detach()) smooth_loss = smoothener.smooth losses.append(smooth_loss) loss.backward() ################### TO BE MODIFIED", "Set optimizer learning_rate = start_lr for group in opt.param_groups: group['lr']", "in cases # when you are maximizing the loss, then", "cases # when you are maximizing the loss, then you", "call. By default it will use GPU. It assumes your", "losses and lrs returned by lr_find as input. Arguments:- skip_start", "on a schedule. 
We will create a stepper object and", "used to flatten_model def children_and_parameters(m:nn.Module): \"Return the children of `m`", "from 0.0 to 1.0.\" return start + pct * (end-start)", "= lrs[skip_start:-skip_end] if skip_end > 0 else lrs[skip_start:] losses =", "[lr.stop/10.]*(num_layer-1) + [lr.stop] return np.array(res) # NOT TO BE MODIFIED", "always return `start`.\" return start def annealing_linear(start, end, pct:float): \"Linearly", "skip_end > 0 else losses[skip_start:] losses = [x.item() for x", "model_state = model.state_dict() opt_state = opt.state_dict() # Set optimizer learning_rate", "/ lr.start step = mult**(1/(num_layer-1)) res = np.array([lr.start*(step**i) for i", "model :- torch.nn.Module loss_fn :- torch.nn.LossFunction opt :- torch.optim.Optimizer wd", "as pct goes from 0.0 to 1.0.\" return start *", "losses = [x.item() for x in losses] fig, ax =", "values of lr. Liks for linearly # increasing lr we", "as input. Arguments:- skip_start -> It will skip skip_start lrs", "data in data_loader: opt.zero_grad() ################### TO BE MODIFIED ################### #", "on GPU if you use use_gpu. Arguments:- data_loader :- torch.utils.data.DataLoader", "in range(epochs): for data in data_loader: opt.zero_grad() ################### TO BE", "add your own custom function, for producing lr. # By", "self.beta ** self.n) # TO BE MODIFIED IN SOME CASES", "`val` to calculate updated smoothed value.\" self.n += 1 self.mov_avg", "(default exp) device :- Torch device to use for training", "not in children_p: children.append(ParameterModule(p)) return children # NOT TO BE", "to 1.0.\" cos_out = np.cos(np.pi * pct) + 1 return", "isinstance(lr, slice): return lr num_layer = len([nn.Sequential(*flatten_model(model))]) if lr.start: mult", "('vals') over 'n_iter' iterations on a schedule. We will create", "lr_find (default=10) num_it :- Number of iterations for lr_find (default=100)", "losses :- list of smoothened version of losses lrs :-", "graph. 
It is the only function from lr_find.py that you", "give us the values of lr. Liks for linearly #", "ParameterModule(nn.Module): \"Register a lone parameter 'p' in a module\" def", "`end` as pct goes from 0.0 to 1.0.\" cos_out =", "= model(inputs) loss = loss_fn(outputs, labels) ##################################################### if use_gpu: smoothener.add_value(loss.detach().cpu())", "import torch.nn as nn import numpy as np import matplotlib.pyplot", "self.n >= self.n_iter # NOT TO BE MODIFIED class SmoothenValue():", "+ (1 - self.beta) * val self.smooth = self.mov_avg /", "= Stepper((start_lr, end_lr), num_it, anneal_func) smoothener = SmoothenValue(smooth_beta) epochs =", "sched.start for i in range(epochs): for data in data_loader: opt.zero_grad()", "from 0.0 to 1.0.\" return start * (end/start) ** pct", "if isinstance(start_lr, (tuple, list)) else start_lr end_lr = lr_range(model, end_lr)", "(start-end) * (1-pct)**degree # NOT TO BE MODIFIED class Stepper():", "in children_p: children.append(ParameterModule(p)) return children # NOT TO BE MODIFIED", "Liks for linearly # increasing lr we would use annealing_linear.", "plot_lr_find(losses, lrs, skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None): \"\"\" It will take", "= start_lr for group in opt.param_groups: group['lr'] = sched.start for", ":- torch.nn.LossFunction opt :- torch.optim.Optimizer wd :- weight decay (default=0).", "own custom function, for producing lr. # By defualt annealing_exp", "in data_loader: opt.zero_grad() ################### TO BE MODIFIED ################### # Depending", "loss_fn(outputs, labels) ##################################################### if use_gpu: smoothener.add_value(loss.detach().cpu()) else: smoothener.add_value(loss.detach()) smooth_loss =", "################### TO BE MODIFIED ################### # For AdamW. 
If you", "[lr.stop] return np.array(res) # NOT TO BE MODIFIED # These", "gradient: {lrs[mg]:.2E}\") ax.plot(lrs[mg], losses[mg], markersize=10, marker='o', color='red') if return_fig is", "the start skip_end -> It will skip skip_end lrs from", "save model_dict model_state = model.state_dict() opt_state = opt.state_dict() # Set", "-> True then get the fig in the return statement", "ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if suggestion: try: mg = (np.gradient(np.array(losses))).argmin() except: print(\"Failed", "numerical gradient: {lrs[mg]:.2E}\") ax.plot(lrs[mg], losses[mg], markersize=10, marker='o', color='red') if return_fig", "= max(1, n_iter) if func is None: self.func = annealing_linear", "mult = lr.stop / lr.start step = mult**(1/(num_layer-1)) res =", "+ (start-end)/2 * cos_out def do_annealing_poly(start, end, pct:float, degree): return", "of the loss function (default=0.98) use_gpu :- True (train on", "The step function you want to use (default exp) device", "custom function, for producing lr. # By defualt annealing_exp is", "from 0.0 to 1.0.\" cos_out = np.cos(np.pi * pct) +", ":- torch.utils.data.DataLoader model :- torch.nn.Module loss_fn :- torch.nn.LossFunction opt :-", "completed\" return self.n >= self.n_iter # NOT TO BE MODIFIED", "losses] fig, ax = plt.subplots(1, 1) ax.plot(lrs, losses) ax.set_ylabel(\"Loss\") ax.set_xlabel(\"Learning", "functions # NOT -> SmoothenValue # YES -> lr_find #", "or slice Returns: Depending upon lr \"\"\" if not isinstance(lr,", "= SmoothenValue(smooth_beta) epochs = int(np.ceil(num_it/len(data_loader))) # save model_dict model_state =", "= lambda m: sum(map(flatten_model,children_and_parameters(m)),[]) if len(list(m.children())) else [m] # NOT", "lines for group in opt.param_groups: for param in group['params']: param.data", "Number of iterations for lr_find (default=100) stop_div :- If the", "you will have # to change it. 
if iteration ==", "sched.is_done or (stop_div and (smooth_loss > 4*best_loss or torch.isnan(loss))): flag", "as nn import numpy as np import matplotlib.pyplot as plt", "want to use (default exp) device :- Torch device to", "will skip skip_start lrs from the start skip_end -> It", "value to smoothen the running avergae of the loss function", "sched = Stepper((start_lr, end_lr), num_it, anneal_func) smoothener = SmoothenValue(smooth_beta) epochs", "Load state dict model.load_state_dict(model_state) opt.load_state_dict(opt_state) lrs.pop() print(f'LR Finder is complete.')", "if iteration%10 == 0: print(f'Iteration: {iteration}') if flag: break #", "plt.subplots(1, 1) ax.plot(lrs, losses) ax.set_ylabel(\"Loss\") ax.set_xlabel(\"Learning Rate\") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e')) if", "# NOT TO BE MODIFIED class SmoothenValue(): \"Create a smooth", "np.array([lr.start*(step**i) for i in range(num_layer)]) else: res = [lr.stop/10.]*(num_layer-1) +", "you want to see the point where the gradient changes", "end_lr) end_lr = np.array(end_lr) if isinstance(end_lr, (tuple, list)) else end_lr", "mg = (np.gradient(np.array(losses))).argmin() except: print(\"Failed to compute the gradients, there", "you want to use Adam, comment these lines for group", "MODIFIED # To be used to flatten_model def children_and_parameters(m:nn.Module): \"Return", "ParameterModule # NOT -> children_and_parameters # NOT -> flatten_model #", "= smooth_loss iteration += 1 if sched.is_done or (stop_div and", "lr_find as input. 
Arguments:- skip_start -> It will skip skip_start", "use one of the above annelaing functions, to step from", "then get the fig in the return statement \"\"\" lrs", "self.smooth = self.mov_avg / (1 - self.beta ** self.n) #", "lr_range # NOT -> scheduling functions # NOT -> SmoothenValue", "in the return statement \"\"\" lrs = lrs[skip_start:-skip_end] if skip_end", "losses[mg], markersize=10, marker='o', color='red') if return_fig is not None: return", "the return statement \"\"\" lrs = lrs[skip_start:-skip_end] if skip_end >", "\"\"\" Build differential learning rate from lr. It will give", "schedule\" self.n += 1 return self.func(self.start, self.end, self.n/self.n_iter) @property def", "give you the Arguments: model :- torch.nn.Module lr :- float", "to use Adam, comment these lines for group in opt.param_groups:", "list of all lrs that we test \"\"\" model.train() stop", "Change lr new_lr = sched.step() lrs.append(new_lr) for group in opt.param_groups:", "from lr_find.py that you will call. By default it will", "lr_find (default=100) stop_div :- If the loss diverges, then stop", "opt.state_dict() # Set optimizer learning_rate = start_lr for group in", "# NOT -> lr_range # NOT -> scheduling functions #", "you are maximizing the loss, then you will have #", "BE MODIFIED def plot_lr_find(losses, lrs, skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None): \"\"\"", "losses = [] lrs = [] lrs.append(start_lr) start_lr = lr_range(model,", "function from lr_find.py that you will call. 
By default it", "NOT -> SmoothenValue # YES -> lr_find # NOT ->", "else lrs[skip_start:] losses = losses[skip_start:-skip_end] if skip_end > 0 else", "= [x.item() for x in losses] fig, ax = plt.subplots(1,", "if len(list(m.children())) else [m] # NOT TO BE MODIFIED def", "res = [lr.stop/10.]*(num_layer-1) + [lr.stop] return np.array(res) # NOT TO", "smooth moving average for a value (loss, etc) using `beta`.\"", "using `beta`.\" def __init__(self, beta:float): self.beta,self.n,self.mov_avg = beta,0,0 def add_value(self,", "start_lr end_lr = lr_range(model, end_lr) end_lr = np.array(end_lr) if isinstance(end_lr,", "if flag: break # Load state dict model.load_state_dict(model_state) opt.load_state_dict(opt_state) lrs.pop()", "for i in range(epochs): for data in data_loader: opt.zero_grad() ###################", "skip_start:int=10, skip_end:int=5, suggestion:bool=False, return_fig:bool=None): \"\"\" It will take the losses", "-> lr_range # NOT -> scheduling functions # NOT ->", "pct goes from 0.0 to 1.0.\" cos_out = np.cos(np.pi *", "suggestion: try: mg = (np.gradient(np.array(losses))).argmin() except: print(\"Failed to compute the", "want to see the point where the gradient changes most", "of all lrs that we test \"\"\" model.train() stop =", "is the only function from lr_find.py that you will call.", "BE MODIFIED ################### # You necessarily don't want to change", "(1-pct)**degree # NOT TO BE MODIFIED class Stepper(): \"\"\" Used", "then stop early (default=True) smooth_beta :- The beta value to", "0.0 to 1.0.\" cos_out = np.cos(np.pi * pct) + 1", "a value (loss, etc) using `beta`.\" def __init__(self, beta:float): self.beta,self.n,self.mov_avg", "function you want to use (default exp) device :- Torch", "best_loss: best_loss = smooth_loss iteration += 1 if sched.is_done or", "a stepper object and then use one of the above", "the losses and lrs returned by lr_find as input. 
Arguments:-", "TO BE MODIFIED def lr_range(model, lr): \"\"\" Build differential learning", "markersize=10, marker='o', color='red') if return_fig is not None: return fig", "@property def is_done(self)->bool: \"Return 'True' if schedule completed\" return self.n", "-> It will skip skip_end lrs from the end suggestion", "# Change lr new_lr = sched.step() lrs.append(new_lr) for group in", "gradients, there might not be enough points.\") return print(f\"Min numerical", "end, pct:float): \"No annealing, always return `start`.\" return start def", "use_gpu:bool=True, device=torch.device('cuda'), anneal_func=annealing_exp): \"\"\" The main function that you will", "lr. Liks for linearly # increasing lr we would use", "training model (default GPU) Returns: losses :- list of smoothened", "NOT TO BE MODIFIED def plot_lr_find(losses, lrs, skip_start:int=10, skip_end:int=5, suggestion:bool=False,", "> 0 else losses[skip_start:] losses = [x.item() for x in", "-> ParameterModule # NOT -> children_and_parameters # NOT -> flatten_model", "val:float)->None: \"Add `val` to calculate updated smoothed value.\" self.n +=", "to your model. 
inputs, labels = data if use_gpu: inputs", "(tuple, list)) else end_lr sched = Stepper((start_lr, end_lr), num_it, anneal_func)", "value.\" self.n += 1 self.mov_avg = self.beta * self.mov_avg +", "use_gpu: inputs = inputs.to(device) labels = labels.to(device) outputs = model(inputs)", "for both lr and momentum def annealing_no(start, end, pct:float): \"No", "= self.mov_avg / (1 - self.beta ** self.n) # TO", "average for a value (loss, etc) using `beta`.\" def __init__(self,", "anneal_func=annealing_exp): \"\"\" The main function that you will call to", "be enough points.\") return print(f\"Min numerical gradient: {lrs[mg]:.2E}\") ax.plot(lrs[mg], losses[mg],", "# NOT -> SmoothenValue # YES -> lr_find # NOT", "to smoothen the running avergae of the loss function (default=0.98)", "+ pct * (end-start) def annealing_exp(start, end, pct:float): \"Exponentially anneal", "opt.param_groups: group['lr'] = new_lr ################### TO BE MODIFIED ################### #", "beta value to smoothen the running avergae of the loss", "changes most return_fig -> True then get the fig in", "Torch device to use for training model (default GPU) Returns:", "def forward(self, x): return x # NOT TO BE MODIFIED", "-> It will skip skip_start lrs from the start skip_end", "group in opt.param_groups: group['lr'] = sched.start for i in range(epochs):", "it. if iteration == 0 or smooth_loss < best_loss: best_loss", "are maximizing the loss, then you will have # to", "will create a stepper object and then use one of", "by lr_find as input. Arguments:- skip_start -> It will skip", "registered in modules.\" children = list(m.children()) children_p = sum([[id(p) for", "skip_start lrs from the start skip_end -> It will skip", "if skip_end > 0 else lrs[skip_start:] losses = losses[skip_start:-skip_end] if", "(1 - self.beta ** self.n) # TO BE MODIFIED IN", "loss, then you will have # to change it. if", "to change it. 
But in cases # when you are", "the point where the gradient changes most return_fig -> True", "group in opt.param_groups: group['lr'] = new_lr ################### TO BE MODIFIED", "np.cos(np.pi * pct) + 1 return end + (start-end)/2 *", "wd:int=0, start_lr:float=1e-7, end_lr:float=10, num_it:int=100, stop_div:bool=True, smooth_beta:float=0.98, use_gpu:bool=True, device=torch.device('cuda'), anneal_func=annealing_exp): \"\"\"", "self.mov_avg + (1 - self.beta) * val self.smooth = self.mov_avg", "lrs[skip_start:] losses = losses[skip_start:-skip_end] if skip_end > 0 else losses[skip_start:]", "x): return x # NOT TO BE MODIFIED # To", "flag = False best_loss = 0. iteration = 0 losses", "Arguments:- data_loader :- torch.utils.data.DataLoader model :- torch.nn.Module loss_fn :- torch.nn.LossFunction", "TO BE MODIFIED class ParameterModule(nn.Module): \"Register a lone parameter 'p'", "as np import matplotlib.pyplot as plt # NOT -> ParameterModule", "from start lr to end lr. \"\"\" def __init__(self, vals,", "(default=0). start_lr :- The learning rate from where to start", "over 'n_iter' iterations on a schedule. We will create a", "GPU if you use use_gpu. Arguments:- data_loader :- torch.utils.data.DataLoader model", "> 0 else lrs[skip_start:] losses = losses[skip_start:-skip_end] if skip_end >", "from the start skip_end -> It will skip skip_end lrs", "self.end = (vals[0], vals[1]) if isinstance(vals, tuple) else (vals,0) self.n_iter", "self.n = 0 def step(self): \"Return next value along annealed", "learning_rate = start_lr for group in opt.param_groups: group['lr'] = sched.start", "start + pct * (end-start) def annealing_exp(start, end, pct:float): \"Exponentially", "smooth_loss iteration += 1 if sched.is_done or (stop_div and (smooth_loss", "else CPU anneal_func :- The step function you want to", "opt :- torch.optim.Optimizer wd :- weight decay (default=0). 
start_lr :-", "\"Linearly anneal from `start` to `end` as pct goes from", "list)) else end_lr sched = Stepper((start_lr, end_lr), num_it, anneal_func) smoothener", "smoothed value.\" self.n += 1 self.mov_avg = self.beta * self.mov_avg", "all lrs that we test \"\"\" model.train() stop = False", "will have to modify your # data pipeline and how", "return start def annealing_linear(start, end, pct:float): \"Linearly anneal from `start`", "lr_range(model, lr): \"\"\" Build differential learning rate from lr. It", "self.func(self.start, self.end, self.n/self.n_iter) @property def is_done(self)->bool: \"Return 'True' if schedule", "group['lr'] = new_lr ################### TO BE MODIFIED ################### # You", "CASES def lr_find(data_loader, model, loss_fn, opt, wd:int=0, start_lr:float=1e-7, end_lr:float=10, num_it:int=100,", "inputs = inputs.to(device) labels = labels.to(device) outputs = model(inputs) loss", "`m` and its direct parameters not registered in modules.\" children", "point where the gradient changes most return_fig -> True then", "end, pct:float): \"Exponentially anneal from `start` to `end` as pct", "1.0.\" return start * (end/start) ** pct def annealing_cos(start, end,", "iteration == 0 or smooth_loss < best_loss: best_loss = smooth_loss", "Finder is complete.') return losses, lrs # NOT TO BE", "if skip_end > 0 else losses[skip_start:] losses = [x.item() for", "and then use one of the above annelaing functions, to", ":- torch.nn.Module loss_fn :- torch.nn.LossFunction opt :- torch.optim.Optimizer wd :-", "= plt.subplots(1, 1) ax.plot(lrs, losses) ax.set_ylabel(\"Loss\") ax.set_xlabel(\"Learning Rate\") ax.set_xscale('log') ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%.0e'))", "plot_lr_find # NOT TO BE MODIFIED class ParameterModule(nn.Module): \"Register a", "Adam, comment these lines for group in opt.param_groups: for param", "TO BE MODIFIED def plot_lr_find(losses, lrs, skip_start:int=10, skip_end:int=5, suggestion:bool=False, 
return_fig:bool=None):", "##################################################### if iteration%10 == 0: print(f'Iteration: {iteration}') if flag: break", "if isinstance(end_lr, (tuple, list)) else end_lr sched = Stepper((start_lr, end_lr),", "MODIFIED class Stepper(): \"\"\" Used to step from start, end", "= True break ##################################################### if iteration%10 == 0: print(f'Iteration: {iteration}')", "self.val = p def forward(self, x): return x # NOT", "device=torch.device('cuda'), anneal_func=annealing_exp): \"\"\" The main function that you will call", "num_it :- Number of iterations for lr_find (default=100) stop_div :-", "** pct def annealing_cos(start, end, pct:float): \"Cosine anneal from `start`", ":- The step function you want to use (default exp)", "group['lr'], param.data) ##################################################### opt.step() # Change lr new_lr = sched.step()", "its direct parameters not registered in modules.\" children = list(m.children())", "flag: break # Load state dict model.load_state_dict(model_state) opt.load_state_dict(opt_state) lrs.pop() print(f'LR", "both lr and momentum def annealing_no(start, end, pct:float): \"No annealing,", "self.mov_avg = self.beta * self.mov_avg + (1 - self.beta) *", "your # data pipeline and how you give inputs to", "lr): \"\"\" Build differential learning rate from lr. 
It will", "= self.beta * self.mov_avg + (1 - self.beta) * val", "matplotlib.pyplot as plt # NOT -> ParameterModule # NOT ->", "annealed schedule\" self.n += 1 return self.func(self.start, self.end, self.n/self.n_iter) @property", "module\" def __init__(self, p:nn.Parameter): super().__init__() self.val = p def forward(self,", "\"\"\" if not isinstance(lr, slice): return lr num_layer = len([nn.Sequential(*flatten_model(model))])", "__init__(self, p:nn.Parameter): super().__init__() self.val = p def forward(self, x): return", "(default=0.98) use_gpu :- True (train on GPU) else CPU anneal_func", "anneal from `start` to `end` as pct goes from 0.0", "for training model (default GPU) Returns: losses :- list of", "\"\"\" def __init__(self, vals, n_iter:int, func=None): self.start, self.end = (vals[0],", "def add_value(self, val:float)->None: \"Add `val` to calculate updated smoothed value.\"", "else end_lr sched = Stepper((start_lr, end_lr), num_it, anneal_func) smoothener =", "children = list(m.children()) children_p = sum([[id(p) for p in c.parameters()]", "np.array(res) # NOT TO BE MODIFIED # These are the", "else: res = [lr.stop/10.]*(num_layer-1) + [lr.stop] return np.array(res) # NOT", "wd :- weight decay (default=0). start_lr :- The learning rate", "it. But in cases # when you are maximizing the", "stop_div :- If the loss diverges, then stop early (default=True)", "for a value (loss, etc) using `beta`.\" def __init__(self, beta:float):", "will have # to change it. if iteration == 0", "print(f'Iteration: {iteration}') if flag: break # Load state dict model.load_state_dict(model_state)", "end ('vals') over 'n_iter' iterations on a schedule. We will", "i in range(epochs): for data in data_loader: opt.zero_grad() ################### TO", "then you will have # to change it. if iteration", "NOT -> ParameterModule # NOT -> children_and_parameters # NOT ->", "0. 
iteration = 0 losses = [] lrs = []", "-> lr_find # NOT -> plot_lr_find # NOT TO BE", "of `m` and its direct parameters not registered in modules.\"", "to step from start, end ('vals') over 'n_iter' iterations on", "smooth_loss = smoothener.smooth losses.append(smooth_loss) loss.backward() ################### TO BE MODIFIED ###################", "self.n += 1 return self.func(self.start, self.end, self.n/self.n_iter) @property def is_done(self)->bool:", "self.n/self.n_iter) @property def is_done(self)->bool: \"Return 'True' if schedule completed\" return", "the gradient changes most return_fig -> True then get the", "end lr. \"\"\" def __init__(self, vals, n_iter:int, func=None): self.start, self.end", "# For AdamW. If you want to use Adam, comment", "BE MODIFIED class ParameterModule(nn.Module): \"Register a lone parameter 'p' in", "n_iter:int, func=None): self.start, self.end = (vals[0], vals[1]) if isinstance(vals, tuple)", "step from start lr to end lr. \"\"\" def __init__(self,", "\"Exponentially anneal from `start` to `end` as pct goes from", "self.start, self.end = (vals[0], vals[1]) if isinstance(vals, tuple) else (vals,0)", "= labels.to(device) outputs = model(inputs) loss = loss_fn(outputs, labels) #####################################################", "annealing, always return `start`.\" return start def annealing_linear(start, end, pct:float):", "func=None): self.start, self.end = (vals[0], vals[1]) if isinstance(vals, tuple) else", "if use_gpu: inputs = inputs.to(device) labels = labels.to(device) outputs =", "# NOT TO BE MODIFIED flatten_model = lambda m: sum(map(flatten_model,children_and_parameters(m)),[])", "NOT TO BE MODIFIED class Stepper(): \"\"\" Used to step", "False flag = False best_loss = 0. 
iteration = 0", "skip_end:int=5, suggestion:bool=False, return_fig:bool=None): \"\"\" It will take the losses and", "+ 1 return end + (start-end)/2 * cos_out def do_annealing_poly(start,", "NOT TO BE MODIFIED def lr_range(model, lr): \"\"\" Build differential", "running avergae of the loss function (default=0.98) use_gpu :- True", "that you will call to plot learning_rate vs losses graph.", "Depending on your model, you will have to modify your", "us the values of lr. Liks for linearly # increasing", "schedule completed\" return self.n >= self.n_iter # NOT TO BE", "import matplotlib.pyplot as plt # NOT -> ParameterModule # NOT", "But in cases # when you are maximizing the loss,", "{lrs[mg]:.2E}\") ax.plot(lrs[mg], losses[mg], markersize=10, marker='o', color='red') if return_fig is not", "the children of `m` and its direct parameters not registered", "Used to step from start, end ('vals') over 'n_iter' iterations", "= lr.stop / lr.start step = mult**(1/(num_layer-1)) res = np.array([lr.start*(step**i)", "lr.stop / lr.start step = mult**(1/(num_layer-1)) res = np.array([lr.start*(step**i) for", "pct goes from 0.0 to 1.0.\" return start + pct" ]
[ "thread.GetNumFrames() for i in range(depth - 1): frame = thread.GetFrameAtIndex(i)", "should be breakpoint. self.expect(\"thread list\", STOPPED_DUE_TO_BREAKPOINT, substrs=['stopped', 'stop reason =", "self.TraceOn(): print() print(function) if function: # Get all instructions for", "front of constructore. # We should maybe make another testcase", "constructor.\"\"\" self.build() self.breakOnCtor() # Now use the Python API to", "mydir = TestBase.compute_mydir(__file__) def test_and_run_command(self): \"\"\"Disassemble each call frame when", "lldb.eStopReasonBreakpoint) self.assertIsNotNone(thread) depth = thread.GetNumFrames() for i in range(depth -", "'// Set break point at this line.') def breakOnCtor(self): \"\"\"Setup/run", "get at each function on the call stack and #", "frameRE = re.compile(r\"\"\" ^\\s\\sframe # heading for the frame info,", "in front of constructore. # We should maybe make another", "inst in insts: # We could simply do 'print inst'", "Find the line number to break for main.cpp. self.line =", "insts = function.GetInstructions(target) for inst in insts: # We could", "the call stack and # disassemble it. target = self.dbg.GetSelectedTarget()", "= function.GetInstructions(target) for inst in insts: # We could simply", "the ctor function of class C. # self.expect(\"thread backtrace\", BACKTRACE_DISPLAYED_CORRECTLY,", "self.build() self.breakOnCtor() raw_output = self.res.GetOutput() frameRE = re.compile(r\"\"\" ^\\s\\sframe #", "# the rest ' + ....' \"\"\", re.VERBOSE) for line", "setUp(self): # Call super's setUp(). TestBase.setUp(self) # Find the line", "be breakpoint. self.expect(\"thread list\", STOPPED_DUE_TO_BREAKPOINT, substrs=['stopped', 'stop reason = breakpoint", "out the disassembly. # But we want to print to", "this function and print them out. insts = function.GetInstructions(target) for", "exe, CURRENT_EXECUTABLE_SET) # Break on the ctor function of class", "function on the call stack and # disassemble it. 
target", "(bpno)]) # This test was failing because we fail to", "We could simply do 'print inst' to print out the", "depth = thread.GetNumFrames() for i in range(depth - 1): frame", "whole testcase for an inessential issue. # We should be", "lldbsuite.test import lldbutil class IterateFrameAndDisassembleTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) def test_and_run_command(self):", "constructor.\"\"\" self.build() self.breakOnCtor() raw_output = self.res.GetOutput() frameRE = re.compile(r\"\"\" ^\\s\\sframe", "to get at each function on the call stack and", "breakpoint. self.expect(\"thread list\", STOPPED_DUE_TO_BREAKPOINT, substrs=['stopped', 'stop reason = breakpoint %d.'", "class C. bpno = lldbutil.run_break_set_by_file_and_line( self, \"main.cpp\", self.line, num_expected_locations=-1) self.runCmd(\"run\",", "# Get all instructions for this function and print them", "the lldb disassemble command on each call frame when stopped", "# True. disasm = str(inst) if self.TraceOn(): print(disasm) def setUp(self):", "CURRENT_EXECUTABLE_SET) # Break on the ctor function of class C.", "function of class C. # self.expect(\"thread backtrace\", BACKTRACE_DISPLAYED_CORRECTLY, # substrs", "def test_and_run_command(self): \"\"\"Disassemble each call frame when stopped on C's", "# disassemble it. target = self.dbg.GetSelectedTarget() process = target.GetProcess() thread", "function header. if self.TraceOn(): print() print(function) if function: # Get", "= frame.GetFunction() # Print the function header. if self.TraceOn(): print()", "line.') def breakOnCtor(self): \"\"\"Setup/run the program so it stops on", "and \\sa.out`(.+) # module`function, and \\s\\+\\s # the rest '", "call frame when stopped on C's constructor.\"\"\" self.build() self.breakOnCtor() #", "it. 
target = self.dbg.GetSelectedTarget() process = target.GetProcess() thread = lldbutil.get_stopped_thread(", "self.assertIsNotNone(thread) depth = thread.GetNumFrames() for i in range(depth - 1):", "Python API to get at each function on the call", "= thread.GetNumFrames() for i in range(depth - 1): frame =", "on C's constructor.\"\"\" self.build() self.breakOnCtor() raw_output = self.res.GetOutput() frameRE =", "out. insts = function.GetInstructions(target) for inst in insts: # We", "time import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import", "....' \"\"\", re.VERBOSE) for line in raw_output.split(os.linesep): match = frameRE.search(line)", "want to print to stdout only if self.TraceOn() is #", "we want to print to stdout only if self.TraceOn() is", "that specifically, but we shouldn't # fail this whole testcase", "# fail this whole testcase for an inessential issue. #", "this whole testcase for an inessential issue. # We should", "module`function, and \\s\\+\\s # the rest ' + ....' \"\"\",", "# the frame pc, and \\sa.out`(.+) # module`function, and \\s\\+\\s", "ctor function of class C. bpno = lldbutil.run_break_set_by_file_and_line( self, \"main.cpp\",", "in range(depth - 1): frame = thread.GetFrameAtIndex(i) function = frame.GetFunction()", "The stop reason of the thread should be breakpoint. self.expect(\"thread", "if self.TraceOn(): print(disasm) def setUp(self): # Call super's setUp(). TestBase.setUp(self)", "break for main.cpp. self.line = line_number('main.cpp', '// Set break point", "at each function on the call stack and # disassemble", "we shouldn't # fail this whole testcase for an inessential", "if self.TraceOn(): print() print(function) if function: # Get all instructions", "so it stops on C's constructor.\"\"\" exe = os.path.join(os.getcwd(), \"a.out\")", "stop reason of the thread should be breakpoint. self.expect(\"thread list\",", "to cover that specifically, but we shouldn't # fail this", "and print them out. 
insts = function.GetInstructions(target) for inst in", "= str(inst) if self.TraceOn(): print(disasm) def setUp(self): # Call super's", "We should maybe make another testcase to cover that specifically,", "line) #print(\"function:\", function) self.runCmd(\"disassemble -n '%s'\" % function) @add_test_categories(['pyapi']) def", "# wildcard, and 0x[0-9a-f]{16} # the frame pc, and \\sa.out`(.+)", "self.build() self.breakOnCtor() # Now use the Python API to get", "\"\"\" Test the lldb disassemble command on each call frame", "test_and_python_api(self): \"\"\"Disassemble each call frame when stopped on C's constructor.\"\"\"", "* from lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil class", "Test the lldb disassemble command on each call frame when", "# Now use the Python API to get at each", "lldb disassemble command on each call frame when stopped on", "= self.dbg.GetSelectedTarget() process = target.GetProcess() thread = lldbutil.get_stopped_thread( process, lldb.eStopReasonBreakpoint)", "test was failing because we fail to put the C::", "ctor. \"\"\" from __future__ import print_function import os import time", "%d.' % (bpno)]) # This test was failing because we", "break point at this line.') def breakOnCtor(self): \"\"\"Setup/run the program", "self.dbg.GetSelectedTarget() process = target.GetProcess() thread = lldbutil.get_stopped_thread( process, lldb.eStopReasonBreakpoint) self.assertIsNotNone(thread)", "this line.') def breakOnCtor(self): \"\"\"Setup/run the program so it stops", "' + ....' \"\"\", re.VERBOSE) for line in raw_output.split(os.linesep): match", "stopped on C's ctor. \"\"\" from __future__ import print_function import", "def breakOnCtor(self): \"\"\"Setup/run the program so it stops on C's", "-n '%s'\" % function) @add_test_categories(['pyapi']) def test_and_python_api(self): \"\"\"Disassemble each call", "the disassembly. 
# But we want to print to stdout", "\"\"\"Disassemble each call frame when stopped on C's constructor.\"\"\" self.build()", "RUN_SUCCEEDED) # The stop reason of the thread should be", "fail this whole testcase for an inessential issue. # We", "test_and_run_command(self): \"\"\"Disassemble each call frame when stopped on C's constructor.\"\"\"", "ctor function of class C. # self.expect(\"thread backtrace\", BACKTRACE_DISPLAYED_CORRECTLY, #", "\"a.out\") self.runCmd(\"file \" + exe, CURRENT_EXECUTABLE_SET) # Break on the", "cover that specifically, but we shouldn't # fail this whole", "program so it stops on C's constructor.\"\"\" exe = os.path.join(os.getcwd(),", "the Python API to get at each function on the", "self, \"main.cpp\", self.line, num_expected_locations=-1) self.runCmd(\"run\", RUN_SUCCEEDED) # The stop reason", "self.runCmd(\"run\", RUN_SUCCEEDED) # The stop reason of the thread should", "num_expected_locations=-1) self.runCmd(\"run\", RUN_SUCCEEDED) # The stop reason of the thread", "But we want to print to stdout only if self.TraceOn()", "for line in raw_output.split(os.linesep): match = frameRE.search(line) if match: function", "print(disasm) def setUp(self): # Call super's setUp(). TestBase.setUp(self) # Find", "breakOnCtor(self): \"\"\"Setup/run the program so it stops on C's constructor.\"\"\"", "TestBase.compute_mydir(__file__) def test_and_run_command(self): \"\"\"Disassemble each call frame when stopped on", "reason = breakpoint %d.' % (bpno)]) # This test was", "to print out the disassembly. # But we want to", "for the frame info, .* # wildcard, and 0x[0-9a-f]{16} #", "function: # Get all instructions for this function and print", "from __future__ import print_function import os import time import lldb", "main.cpp. self.line = line_number('main.cpp', '// Set break point at this", "failing because we fail to put the C:: in front", "True. disasm = str(inst) if self.TraceOn(): print(disasm) def setUp(self): #", "on the ctor function of class C. 
bpno = lldbutil.run_break_set_by_file_and_line(", "if match: function = match.group(1) #print(\"line:\", line) #print(\"function:\", function) self.runCmd(\"disassemble", "specifically, but we shouldn't # fail this whole testcase for", "on the ctor function of class C. # self.expect(\"thread backtrace\",", "stopped on C's constructor.\"\"\" self.build() self.breakOnCtor() # Now use the", "for this function and print them out. insts = function.GetInstructions(target)", "only if self.TraceOn() is # True. disasm = str(inst) if", "frame pc, and \\sa.out`(.+) # module`function, and \\s\\+\\s # the", "str(inst) if self.TraceOn(): print(disasm) def setUp(self): # Call super's setUp().", "lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * from lldbsuite.test import", "^\\s\\sframe # heading for the frame info, .* # wildcard,", "do 'print inst' to print out the disassembly. # But", "inst' to print out the disassembly. # But we want", "of the thread should be breakpoint. self.expect(\"thread list\", STOPPED_DUE_TO_BREAKPOINT, substrs=['stopped',", "target.GetProcess() thread = lldbutil.get_stopped_thread( process, lldb.eStopReasonBreakpoint) self.assertIsNotNone(thread) depth = thread.GetNumFrames()", "the frame info, .* # wildcard, and 0x[0-9a-f]{16} # the", "import lldbutil class IterateFrameAndDisassembleTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) def test_and_run_command(self): \"\"\"Disassemble", "rest ' + ....' \"\"\", re.VERBOSE) for line in raw_output.split(os.linesep):", "+ ....' \"\"\", re.VERBOSE) for line in raw_output.split(os.linesep): match =", "stops on C's constructor.\"\"\" exe = os.path.join(os.getcwd(), \"a.out\") self.runCmd(\"file \"", "match = frameRE.search(line) if match: function = match.group(1) #print(\"line:\", line)", "issue. 
# We should be stopped on the ctor function", "= thread.GetFrameAtIndex(i) function = frame.GetFunction() # Print the function header.", "self.breakOnCtor() raw_output = self.res.GetOutput() frameRE = re.compile(r\"\"\" ^\\s\\sframe # heading", "call stack and # disassemble it. target = self.dbg.GetSelectedTarget() process", "on C's constructor.\"\"\" exe = os.path.join(os.getcwd(), \"a.out\") self.runCmd(\"file \" +", "an inessential issue. # We should be stopped on the", "was failing because we fail to put the C:: in", "stdout only if self.TraceOn() is # True. disasm = str(inst)", "point at this line.') def breakOnCtor(self): \"\"\"Setup/run the program so", "0x[0-9a-f]{16} # the frame pc, and \\sa.out`(.+) # module`function, and", "shouldn't # fail this whole testcase for an inessential issue.", "Call super's setUp(). TestBase.setUp(self) # Find the line number to", "__future__ import print_function import os import time import lldb from", "the rest ' + ....' \"\"\", re.VERBOSE) for line in", "thread.GetFrameAtIndex(i) function = frame.GetFunction() # Print the function header. if", "self.res.GetOutput() frameRE = re.compile(r\"\"\" ^\\s\\sframe # heading for the frame", "another testcase to cover that specifically, but we shouldn't #", "make another testcase to cover that specifically, but we shouldn't", "print out the disassembly. # But we want to print", "could simply do 'print inst' to print out the disassembly.", "each call frame when stopped on C's ctor. \"\"\" from", "reason of the thread should be breakpoint. self.expect(\"thread list\", STOPPED_DUE_TO_BREAKPOINT,", "def setUp(self): # Call super's setUp(). TestBase.setUp(self) # Find the", "bpno = lldbutil.run_break_set_by_file_and_line( self, \"main.cpp\", self.line, num_expected_locations=-1) self.runCmd(\"run\", RUN_SUCCEEDED) #", "for an inessential issue. # We should be stopped on", "testcase for an inessential issue. 
# We should be stopped", "+ exe, CURRENT_EXECUTABLE_SET) # Break on the ctor function of", "insts: # We could simply do 'print inst' to print", "the thread should be breakpoint. self.expect(\"thread list\", STOPPED_DUE_TO_BREAKPOINT, substrs=['stopped', 'stop", "stack and # disassemble it. target = self.dbg.GetSelectedTarget() process =", "print to stdout only if self.TraceOn() is # True. disasm", "exe = os.path.join(os.getcwd(), \"a.out\") self.runCmd(\"file \" + exe, CURRENT_EXECUTABLE_SET) #", "heading for the frame info, .* # wildcard, and 0x[0-9a-f]{16}", "is # True. disasm = str(inst) if self.TraceOn(): print(disasm) def", "% (bpno)]) # This test was failing because we fail", "self.line, num_expected_locations=-1) self.runCmd(\"run\", RUN_SUCCEEDED) # The stop reason of the", "process = target.GetProcess() thread = lldbutil.get_stopped_thread( process, lldb.eStopReasonBreakpoint) self.assertIsNotNone(thread) depth", "'%s'\" % function) @add_test_categories(['pyapi']) def test_and_python_api(self): \"\"\"Disassemble each call frame", "in insts: # We could simply do 'print inst' to", "to print to stdout only if self.TraceOn() is # True.", "should be stopped on the ctor function of class C.", "import * from lldbsuite.test import lldbutil class IterateFrameAndDisassembleTestCase(TestBase): mydir =", "command on each call frame when stopped on C's ctor.", "Get all instructions for this function and print them out.", "from lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil class IterateFrameAndDisassembleTestCase(TestBase):", "the line number to break for main.cpp. 
self.line = line_number('main.cpp',", "raw_output.split(os.linesep): match = frameRE.search(line) if match: function = match.group(1) #print(\"line:\",", "i in range(depth - 1): frame = thread.GetFrameAtIndex(i) function =", "frameRE.search(line) if match: function = match.group(1) #print(\"line:\", line) #print(\"function:\", function)", "when stopped on C's constructor.\"\"\" self.build() self.breakOnCtor() raw_output = self.res.GetOutput()", "STOPPED_DUE_TO_BREAKPOINT, substrs=['stopped', 'stop reason = breakpoint %d.' % (bpno)]) #", "on the call stack and # disassemble it. target =", "self.TraceOn() is # True. disasm = str(inst) if self.TraceOn(): print(disasm)", "= os.path.join(os.getcwd(), \"a.out\") self.runCmd(\"file \" + exe, CURRENT_EXECUTABLE_SET) # Break", "print_function import os import time import lldb from lldbsuite.test.decorators import", "import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import *", "= breakpoint %d.' % (bpno)]) # This test was failing", "for inst in insts: # We could simply do 'print", "print() print(function) if function: # Get all instructions for this", "each call frame when stopped on C's constructor.\"\"\" self.build() self.breakOnCtor()", "self.runCmd(\"file \" + exe, CURRENT_EXECUTABLE_SET) # Break on the ctor", "# module`function, and \\s\\+\\s # the rest ' + ....'", "disassemble command on each call frame when stopped on C's", "self.expect(\"thread list\", STOPPED_DUE_TO_BREAKPOINT, substrs=['stopped', 'stop reason = breakpoint %d.' %", "substrs=['stopped', 'stop reason = breakpoint %d.' % (bpno)]) # This", "import print_function import os import time import lldb from lldbsuite.test.decorators", "re.compile(r\"\"\" ^\\s\\sframe # heading for the frame info, .* #", "Print the function header. 
if self.TraceOn(): print() print(function) if function:", "We should be stopped on the ctor function of class", "#print(\"function:\", function) self.runCmd(\"disassemble -n '%s'\" % function) @add_test_categories(['pyapi']) def test_and_python_api(self):", "function = match.group(1) #print(\"line:\", line) #print(\"function:\", function) self.runCmd(\"disassemble -n '%s'\"", "fail to put the C:: in front of constructore. #", "Break on the ctor function of class C. bpno =", "self.breakOnCtor() # Now use the Python API to get at", "<gh_stars>100-1000 \"\"\" Test the lldb disassemble command on each call", "at this line.') def breakOnCtor(self): \"\"\"Setup/run the program so it", "class C. # self.expect(\"thread backtrace\", BACKTRACE_DISPLAYED_CORRECTLY, # substrs = ['C::C'])", "line_number('main.cpp', '// Set break point at this line.') def breakOnCtor(self):", "This test was failing because we fail to put the", "= frameRE.search(line) if match: function = match.group(1) #print(\"line:\", line) #print(\"function:\",", "call frame when stopped on C's constructor.\"\"\" self.build() self.breakOnCtor() raw_output", "the program so it stops on C's constructor.\"\"\" exe =", "disassembly. # But we want to print to stdout only", "but we shouldn't # fail this whole testcase for an", "C's constructor.\"\"\" self.build() self.breakOnCtor() raw_output = self.res.GetOutput() frameRE = re.compile(r\"\"\"", "#print(\"line:\", line) #print(\"function:\", function) self.runCmd(\"disassemble -n '%s'\" % function) @add_test_categories(['pyapi'])", "# We should maybe make another testcase to cover that", "put the C:: in front of constructore. # We should", "disassemble it. target = self.dbg.GetSelectedTarget() process = target.GetProcess() thread =", "frame info, .* # wildcard, and 0x[0-9a-f]{16} # the frame", "import time import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest", "# But we want to print to stdout only if", "super's setUp(). 
TestBase.setUp(self) # Find the line number to break", "# Find the line number to break for main.cpp. self.line", "print(function) if function: # Get all instructions for this function", "from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * from lldbsuite.test", "frame = thread.GetFrameAtIndex(i) function = frame.GetFunction() # Print the function", "constructore. # We should maybe make another testcase to cover", "stopped on C's constructor.\"\"\" self.build() self.breakOnCtor() raw_output = self.res.GetOutput() frameRE", "if self.TraceOn() is # True. disasm = str(inst) if self.TraceOn():", "# heading for the frame info, .* # wildcard, and", "from lldbsuite.test import lldbutil class IterateFrameAndDisassembleTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) def", "function of class C. bpno = lldbutil.run_break_set_by_file_and_line( self, \"main.cpp\", self.line,", "# The stop reason of the thread should be breakpoint.", "# Call super's setUp(). TestBase.setUp(self) # Find the line number", "frame when stopped on C's constructor.\"\"\" self.build() self.breakOnCtor() # Now", "C's ctor. \"\"\" from __future__ import print_function import os import", "\"\"\"Setup/run the program so it stops on C's constructor.\"\"\" exe", "list\", STOPPED_DUE_TO_BREAKPOINT, substrs=['stopped', 'stop reason = breakpoint %d.' % (bpno)])", "on C's constructor.\"\"\" self.build() self.breakOnCtor() # Now use the Python", "\\s\\+\\s # the rest ' + ....' \"\"\", re.VERBOSE) for", "= lldbutil.get_stopped_thread( process, lldb.eStopReasonBreakpoint) self.assertIsNotNone(thread) depth = thread.GetNumFrames() for i", "on each call frame when stopped on C's ctor. \"\"\"", "function = frame.GetFunction() # Print the function header. 
if self.TraceOn():", "because we fail to put the C:: in front of", "for i in range(depth - 1): frame = thread.GetFrameAtIndex(i) function", "in raw_output.split(os.linesep): match = frameRE.search(line) if match: function = match.group(1)", "inessential issue. # We should be stopped on the ctor", "C. bpno = lldbutil.run_break_set_by_file_and_line( self, \"main.cpp\", self.line, num_expected_locations=-1) self.runCmd(\"run\", RUN_SUCCEEDED)", "C:: in front of constructore. # We should maybe make", "C's constructor.\"\"\" self.build() self.breakOnCtor() # Now use the Python API", "setUp(). TestBase.setUp(self) # Find the line number to break for", "of class C. # self.expect(\"thread backtrace\", BACKTRACE_DISPLAYED_CORRECTLY, # substrs =", "target = self.dbg.GetSelectedTarget() process = target.GetProcess() thread = lldbutil.get_stopped_thread( process,", "% function) @add_test_categories(['pyapi']) def test_and_python_api(self): \"\"\"Disassemble each call frame when", "lldbutil.run_break_set_by_file_and_line( self, \"main.cpp\", self.line, num_expected_locations=-1) self.runCmd(\"run\", RUN_SUCCEEDED) # The stop", "= lldbutil.run_break_set_by_file_and_line( self, \"main.cpp\", self.line, num_expected_locations=-1) self.runCmd(\"run\", RUN_SUCCEEDED) # The", "wildcard, and 0x[0-9a-f]{16} # the frame pc, and \\sa.out`(.+) #", "function) self.runCmd(\"disassemble -n '%s'\" % function) @add_test_categories(['pyapi']) def test_and_python_api(self): \"\"\"Disassemble", "IterateFrameAndDisassembleTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) def test_and_run_command(self): \"\"\"Disassemble each call frame", "'stop reason = breakpoint %d.' % (bpno)]) # This test", "os import time import lldb from lldbsuite.test.decorators import * from", "# This test was failing because we fail to put", "for main.cpp. self.line = line_number('main.cpp', '// Set break point at", "Now use the Python API to get at each function", "and \\s\\+\\s # the rest ' + ....' 
\"\"\", re.VERBOSE)", "pc, and \\sa.out`(.+) # module`function, and \\s\\+\\s # the rest", "print them out. insts = function.GetInstructions(target) for inst in insts:", "# Break on the ctor function of class C. bpno", "to put the C:: in front of constructore. # We", "* from lldbsuite.test import lldbutil class IterateFrameAndDisassembleTestCase(TestBase): mydir = TestBase.compute_mydir(__file__)", "when stopped on C's ctor. \"\"\" from __future__ import print_function", "lldbutil.get_stopped_thread( process, lldb.eStopReasonBreakpoint) self.assertIsNotNone(thread) depth = thread.GetNumFrames() for i in", "= TestBase.compute_mydir(__file__) def test_and_run_command(self): \"\"\"Disassemble each call frame when stopped", "constructor.\"\"\" exe = os.path.join(os.getcwd(), \"a.out\") self.runCmd(\"file \" + exe, CURRENT_EXECUTABLE_SET)", "maybe make another testcase to cover that specifically, but we", "use the Python API to get at each function on", "on C's ctor. \"\"\" from __future__ import print_function import os", "# Print the function header. if self.TraceOn(): print() print(function) if", "simply do 'print inst' to print out the disassembly. #", "the C:: in front of constructore. # We should maybe", "to break for main.cpp. self.line = line_number('main.cpp', '// Set break", "# We should be stopped on the ctor function of", "of constructore. # We should maybe make another testcase to", "re.VERBOSE) for line in raw_output.split(os.linesep): match = frameRE.search(line) if match:", "disasm = str(inst) if self.TraceOn(): print(disasm) def setUp(self): # Call", "# We could simply do 'print inst' to print out", "to stdout only if self.TraceOn() is # True. disasm =", "1): frame = thread.GetFrameAtIndex(i) function = frame.GetFunction() # Print the", "frame.GetFunction() # Print the function header. 
if self.TraceOn(): print() print(function)", "lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * from", "TestBase.setUp(self) # Find the line number to break for main.cpp.", "the ctor function of class C. bpno = lldbutil.run_break_set_by_file_and_line( self,", "C's constructor.\"\"\" exe = os.path.join(os.getcwd(), \"a.out\") self.runCmd(\"file \" + exe,", "= target.GetProcess() thread = lldbutil.get_stopped_thread( process, lldb.eStopReasonBreakpoint) self.assertIsNotNone(thread) depth =", "import * from lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil", "import os import time import lldb from lldbsuite.test.decorators import *", ".* # wildcard, and 0x[0-9a-f]{16} # the frame pc, and", "header. if self.TraceOn(): print() print(function) if function: # Get all", "thread = lldbutil.get_stopped_thread( process, lldb.eStopReasonBreakpoint) self.assertIsNotNone(thread) depth = thread.GetNumFrames() for", "raw_output = self.res.GetOutput() frameRE = re.compile(r\"\"\" ^\\s\\sframe # heading for", "when stopped on C's constructor.\"\"\" self.build() self.breakOnCtor() # Now use", "\"\"\", re.VERBOSE) for line in raw_output.split(os.linesep): match = frameRE.search(line) if", "self.runCmd(\"disassemble -n '%s'\" % function) @add_test_categories(['pyapi']) def test_and_python_api(self): \"\"\"Disassemble each", "we fail to put the C:: in front of constructore.", "self.line = line_number('main.cpp', '// Set break point at this line.')", "frame when stopped on C's ctor. 
\"\"\" from __future__ import", "\"\"\" from __future__ import print_function import os import time import", "should maybe make another testcase to cover that specifically, but", "class IterateFrameAndDisassembleTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) def test_and_run_command(self): \"\"\"Disassemble each call", "function.GetInstructions(target) for inst in insts: # We could simply do", "info, .* # wildcard, and 0x[0-9a-f]{16} # the frame pc,", "thread should be breakpoint. self.expect(\"thread list\", STOPPED_DUE_TO_BREAKPOINT, substrs=['stopped', 'stop reason", "line in raw_output.split(os.linesep): match = frameRE.search(line) if match: function =", "os.path.join(os.getcwd(), \"a.out\") self.runCmd(\"file \" + exe, CURRENT_EXECUTABLE_SET) # Break on", "the function header. if self.TraceOn(): print() print(function) if function: #", "lldbutil class IterateFrameAndDisassembleTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) def test_and_run_command(self): \"\"\"Disassemble each", "self.TraceOn(): print(disasm) def setUp(self): # Call super's setUp(). TestBase.setUp(self) #", "and 0x[0-9a-f]{16} # the frame pc, and \\sa.out`(.+) # module`function,", "\" + exe, CURRENT_EXECUTABLE_SET) # Break on the ctor function", "and # disassemble it. target = self.dbg.GetSelectedTarget() process = target.GetProcess()", "function and print them out. insts = function.GetInstructions(target) for inst", "of class C. bpno = lldbutil.run_break_set_by_file_and_line( self, \"main.cpp\", self.line, num_expected_locations=-1)", "stopped on the ctor function of class C. # self.expect(\"thread", "\\sa.out`(.+) # module`function, and \\s\\+\\s # the rest ' +", "all instructions for this function and print them out. 
insts", "= match.group(1) #print(\"line:\", line) #print(\"function:\", function) self.runCmd(\"disassemble -n '%s'\" %", "lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil class IterateFrameAndDisassembleTestCase(TestBase): mydir", "'print inst' to print out the disassembly. # But we", "testcase to cover that specifically, but we shouldn't # fail", "call frame when stopped on C's ctor. \"\"\" from __future__", "match.group(1) #print(\"line:\", line) #print(\"function:\", function) self.runCmd(\"disassemble -n '%s'\" % function)", "function) @add_test_categories(['pyapi']) def test_and_python_api(self): \"\"\"Disassemble each call frame when stopped", "be stopped on the ctor function of class C. #", "breakpoint %d.' % (bpno)]) # This test was failing because", "range(depth - 1): frame = thread.GetFrameAtIndex(i) function = frame.GetFunction() #", "Set break point at this line.') def breakOnCtor(self): \"\"\"Setup/run the", "it stops on C's constructor.\"\"\" exe = os.path.join(os.getcwd(), \"a.out\") self.runCmd(\"file", "frame when stopped on C's constructor.\"\"\" self.build() self.breakOnCtor() raw_output =", "\"main.cpp\", self.line, num_expected_locations=-1) self.runCmd(\"run\", RUN_SUCCEEDED) # The stop reason of", "line number to break for main.cpp. self.line = line_number('main.cpp', '//", "them out. 
insts = function.GetInstructions(target) for inst in insts: #", "= line_number('main.cpp', '// Set break point at this line.') def", "the frame pc, and \\sa.out`(.+) # module`function, and \\s\\+\\s #", "process, lldb.eStopReasonBreakpoint) self.assertIsNotNone(thread) depth = thread.GetNumFrames() for i in range(depth", "= re.compile(r\"\"\" ^\\s\\sframe # heading for the frame info, .*", "match: function = match.group(1) #print(\"line:\", line) #print(\"function:\", function) self.runCmd(\"disassemble -n", "if function: # Get all instructions for this function and", "def test_and_python_api(self): \"\"\"Disassemble each call frame when stopped on C's", "instructions for this function and print them out. insts =", "number to break for main.cpp. self.line = line_number('main.cpp', '// Set", "= self.res.GetOutput() frameRE = re.compile(r\"\"\" ^\\s\\sframe # heading for the", "API to get at each function on the call stack", "- 1): frame = thread.GetFrameAtIndex(i) function = frame.GetFunction() # Print", "@add_test_categories(['pyapi']) def test_and_python_api(self): \"\"\"Disassemble each call frame when stopped on", "each function on the call stack and # disassemble it." ]
[ "LPAR = \"(\" MAYBE = \"maybe\" NUMBER = \"number\" OF", "\"?\" ROOM = \"room\" RPAR = \")\" S = \"'s\"", "\"found\" HAD = \"had\" HATTA = \"hatta\" LETTER = \"letter\"", "\"eventually\" FOUND = \"found\" HAD = \"had\" HATTA = \"hatta\"", "BECAUSE = \"because\" BUT = \"but\" CLOSED = \"closed\" COMMA", "EITHER = \"either\" ENOUGH = \"enough\" EVENTUALLY = \"eventually\" FOUND", "= \"maybe\" NUMBER = \"number\" OF = \"of\" OPENED =", "UNDERSCORE = \"_\" UNSURE = \"unsure\" WAS = \"was\" WHAT", ",ROOM ,RPAR ,S ,SAID, SENTENCE ,SO ,SPIDER ,SPOKE ,THE ,THEN", "LOOKING_GLASS = \"looking-glass\" LPAR = \"(\" MAYBE = \"maybe\" NUMBER", "A, ALICE, AND, ATE, BECAME ,BECAUSE ,BUT ,CLOSED ,COMMA ,CONTAINED", ",SO ,SPIDER ,SPOKE ,THE ,THEN ,TIMES ,TOO ,UNDERSCORE ,UNSURE ,WAS", "= \"room\" RPAR = \")\" S = \"'s\" SAID =", "\"said\" SENTENCE = \"sentence\" SO = \"so\" SPIDER = \"spider\"", "= \"then\" TIMES = \"times\" TOO = \"too\" UNDERSCORE =", "FOUND = \"found\" HAD = \"had\" HATTA = \"hatta\" LETTER", "words used in MAlice. 
''' A = \"a\" ALICE =", "\"spider\" SPOKE = \"spoke\" THE = \"The\" THEN = \"then\"", "\"letter\" LOOKING_GLASS = \"looking-glass\" LPAR = \"(\" MAYBE = \"maybe\"", "= \"which\" RESTRICTED = [ A, ALICE, AND, ATE, BECAME", ",OR ,PERHAPS ,PIECE ,QUESTION ,ROOM ,RPAR ,S ,SAID, SENTENCE ,SO", ",COMMA ,CONTAINED ,DOT ,DRANK ,EITHER ,ENOUGH ,EVENTUALLY ,FOUND ,HAD ,HATTA", "DRANK = \"drank\" EITHER = \"either\" ENOUGH = \"enough\" EVENTUALLY", "BUT = \"but\" CLOSED = \"closed\" COMMA = \",\" CONTAINED", ",OPENED ,OR ,PERHAPS ,PIECE ,QUESTION ,ROOM ,RPAR ,S ,SAID, SENTENCE", "= \"'s\" SAID = \"said\" SENTENCE = \"sentence\" SO =", "= \"but\" CLOSED = \"closed\" COMMA = \",\" CONTAINED =", ",EVENTUALLY ,FOUND ,HAD ,HATTA ,LETTER ,LOOKING_GLASS ,LPAR ,MAYBE ,NUMBER ,OF", "\"so\" SPIDER = \"spider\" SPOKE = \"spoke\" THE = \"The\"", "\"which\" RESTRICTED = [ A, ALICE, AND, ATE, BECAME ,BECAUSE", "= \".\" DRANK = \"drank\" EITHER = \"either\" ENOUGH =", "\"what\" WHICH = \"which\" RESTRICTED = [ A, ALICE, AND,", ",RPAR ,S ,SAID, SENTENCE ,SO ,SPIDER ,SPOKE ,THE ,THEN ,TIMES", "\"hatta\" LETTER = \"letter\" LOOKING_GLASS = \"looking-glass\" LPAR = \"(\"", "QUESTION = \"?\" ROOM = \"room\" RPAR = \")\" S", ",BUT ,CLOSED ,COMMA ,CONTAINED ,DOT ,DRANK ,EITHER ,ENOUGH ,EVENTUALLY ,FOUND", "\")\" S = \"'s\" SAID = \"said\" SENTENCE = \"sentence\"", ",ENOUGH ,EVENTUALLY ,FOUND ,HAD ,HATTA ,LETTER ,LOOKING_GLASS ,LPAR ,MAYBE ,NUMBER", "individual words used in MAlice. 
''' A = \"a\" ALICE", "= \"looking-glass\" LPAR = \"(\" MAYBE = \"maybe\" NUMBER =", "\"(\" MAYBE = \"maybe\" NUMBER = \"number\" OF = \"of\"", ",SAID, SENTENCE ,SO ,SPIDER ,SPOKE ,THE ,THEN ,TIMES ,TOO ,UNDERSCORE", "\"had\" HATTA = \"hatta\" LETTER = \"letter\" LOOKING_GLASS = \"looking-glass\"", "CONTAINED = \"contained\" DOT = \".\" DRANK = \"drank\" EITHER", "SAID = \"said\" SENTENCE = \"sentence\" SO = \"so\" SPIDER", "NUMBER = \"number\" OF = \"of\" OPENED = \"opened\" OR", "= \"perhaps\" PIECE = \"piece\" QUESTION = \"?\" ROOM =", "\"enough\" EVENTUALLY = \"eventually\" FOUND = \"found\" HAD = \"had\"", "= \"sentence\" SO = \"so\" SPIDER = \"spider\" SPOKE =", "RESTRICTED = [ A, ALICE, AND, ATE, BECAME ,BECAUSE ,BUT", "ATE, BECAME ,BECAUSE ,BUT ,CLOSED ,COMMA ,CONTAINED ,DOT ,DRANK ,EITHER", ",DOT ,DRANK ,EITHER ,ENOUGH ,EVENTUALLY ,FOUND ,HAD ,HATTA ,LETTER ,LOOKING_GLASS", "= \"so\" SPIDER = \"spider\" SPOKE = \"spoke\" THE =", "= \"opened\" OR = \"or\" PERHAPS = \"perhaps\" PIECE =", "= \"spider\" SPOKE = \"spoke\" THE = \"The\" THEN =", "\"then\" TIMES = \"times\" TOO = \"too\" UNDERSCORE = \"_\"", "= \"or\" PERHAPS = \"perhaps\" PIECE = \"piece\" QUESTION =", "\"or\" PERHAPS = \"perhaps\" PIECE = \"piece\" QUESTION = \"?\"", "= \"found\" HAD = \"had\" HATTA = \"hatta\" LETTER =", "\"Alice\" AND = \"and\" ATE = \"ate\" BECAME = \"became\"", "= \"too\" UNDERSCORE = \"_\" UNSURE = \"unsure\" WAS =", "used in MAlice. ''' A = \"a\" ALICE = \"Alice\"", "\"sentence\" SO = \"so\" SPIDER = \"spider\" SPOKE = \"spoke\"", "\"contained\" DOT = \".\" DRANK = \"drank\" EITHER = \"either\"", "''' All the reserved, individual words used in MAlice. 
'''", "\"unsure\" WAS = \"was\" WHAT = \"what\" WHICH = \"which\"", "HATTA = \"hatta\" LETTER = \"letter\" LOOKING_GLASS = \"looking-glass\" LPAR", "= \")\" S = \"'s\" SAID = \"said\" SENTENCE =", "AND = \"and\" ATE = \"ate\" BECAME = \"became\" BECAUSE", ",HATTA ,LETTER ,LOOKING_GLASS ,LPAR ,MAYBE ,NUMBER ,OF ,OPENED ,OR ,PERHAPS", ",S ,SAID, SENTENCE ,SO ,SPIDER ,SPOKE ,THE ,THEN ,TIMES ,TOO", "OR = \"or\" PERHAPS = \"perhaps\" PIECE = \"piece\" QUESTION", "\",\" CONTAINED = \"contained\" DOT = \".\" DRANK = \"drank\"", "the reserved, individual words used in MAlice. ''' A =", "SO = \"so\" SPIDER = \"spider\" SPOKE = \"spoke\" THE", "= \"spoke\" THE = \"The\" THEN = \"then\" TIMES =", "EVENTUALLY = \"eventually\" FOUND = \"found\" HAD = \"had\" HATTA", "\"drank\" EITHER = \"either\" ENOUGH = \"enough\" EVENTUALLY = \"eventually\"", "HAD = \"had\" HATTA = \"hatta\" LETTER = \"letter\" LOOKING_GLASS", "ATE = \"ate\" BECAME = \"became\" BECAUSE = \"because\" BUT", "\"room\" RPAR = \")\" S = \"'s\" SAID = \"said\"", "SPOKE = \"spoke\" THE = \"The\" THEN = \"then\" TIMES", "\"_\" UNSURE = \"unsure\" WAS = \"was\" WHAT = \"what\"", ",OF ,OPENED ,OR ,PERHAPS ,PIECE ,QUESTION ,ROOM ,RPAR ,S ,SAID,", "\"piece\" QUESTION = \"?\" ROOM = \"room\" RPAR = \")\"", "= \"said\" SENTENCE = \"sentence\" SO = \"so\" SPIDER =", "= \"The\" THEN = \"then\" TIMES = \"times\" TOO =", "\"number\" OF = \"of\" OPENED = \"opened\" OR = \"or\"", ",PIECE ,QUESTION ,ROOM ,RPAR ,S ,SAID, SENTENCE ,SO ,SPIDER ,SPOKE", "RPAR = \")\" S = \"'s\" SAID = \"said\" SENTENCE", ",MAYBE ,NUMBER ,OF ,OPENED ,OR ,PERHAPS ,PIECE ,QUESTION ,ROOM ,RPAR", "\".\" DRANK = \"drank\" EITHER = \"either\" ENOUGH = \"enough\"", "= \"times\" TOO = \"too\" UNDERSCORE = \"_\" UNSURE =", "ALICE = \"Alice\" AND = \"and\" ATE = \"ate\" BECAME", "= \"became\" BECAUSE = \"because\" BUT = \"but\" CLOSED =", "\"and\" ATE = \"ate\" BECAME = \"became\" BECAUSE = \"because\"", "= \"what\" WHICH = \"which\" RESTRICTED = [ A, 
ALICE,", "BECAME ,BECAUSE ,BUT ,CLOSED ,COMMA ,CONTAINED ,DOT ,DRANK ,EITHER ,ENOUGH", "\"either\" ENOUGH = \"enough\" EVENTUALLY = \"eventually\" FOUND = \"found\"", "ROOM = \"room\" RPAR = \")\" S = \"'s\" SAID", "\"maybe\" NUMBER = \"number\" OF = \"of\" OPENED = \"opened\"", ",PERHAPS ,PIECE ,QUESTION ,ROOM ,RPAR ,S ,SAID, SENTENCE ,SO ,SPIDER", "ALICE, AND, ATE, BECAME ,BECAUSE ,BUT ,CLOSED ,COMMA ,CONTAINED ,DOT", "MAlice. ''' A = \"a\" ALICE = \"Alice\" AND =", "CLOSED = \"closed\" COMMA = \",\" CONTAINED = \"contained\" DOT", ",HAD ,HATTA ,LETTER ,LOOKING_GLASS ,LPAR ,MAYBE ,NUMBER ,OF ,OPENED ,OR", "PIECE = \"piece\" QUESTION = \"?\" ROOM = \"room\" RPAR", "''' A = \"a\" ALICE = \"Alice\" AND = \"and\"", "\"looking-glass\" LPAR = \"(\" MAYBE = \"maybe\" NUMBER = \"number\"", "TOO = \"too\" UNDERSCORE = \"_\" UNSURE = \"unsure\" WAS", "\"because\" BUT = \"but\" CLOSED = \"closed\" COMMA = \",\"", "= \"either\" ENOUGH = \"enough\" EVENTUALLY = \"eventually\" FOUND =", "= \"ate\" BECAME = \"became\" BECAUSE = \"because\" BUT =", "LETTER = \"letter\" LOOKING_GLASS = \"looking-glass\" LPAR = \"(\" MAYBE", "WHAT = \"what\" WHICH = \"which\" RESTRICTED = [ A,", "= \"had\" HATTA = \"hatta\" LETTER = \"letter\" LOOKING_GLASS =", "= \"a\" ALICE = \"Alice\" AND = \"and\" ATE =", "BECAME = \"became\" BECAUSE = \"because\" BUT = \"but\" CLOSED", "SENTENCE ,SO ,SPIDER ,SPOKE ,THE ,THEN ,TIMES ,TOO ,UNDERSCORE ,UNSURE", ",SPIDER ,SPOKE ,THE ,THEN ,TIMES ,TOO ,UNDERSCORE ,UNSURE ,WAS ,WHAT", ",FOUND ,HAD ,HATTA ,LETTER ,LOOKING_GLASS ,LPAR ,MAYBE ,NUMBER ,OF ,OPENED", ",DRANK ,EITHER ,ENOUGH ,EVENTUALLY ,FOUND ,HAD ,HATTA ,LETTER ,LOOKING_GLASS ,LPAR", "OF = \"of\" OPENED = \"opened\" OR = \"or\" PERHAPS", "WAS = \"was\" WHAT = \"what\" WHICH = \"which\" RESTRICTED", ",EITHER ,ENOUGH ,EVENTUALLY ,FOUND ,HAD ,HATTA ,LETTER ,LOOKING_GLASS ,LPAR ,MAYBE", "\"spoke\" THE = \"The\" THEN = \"then\" TIMES = \"times\"", "[ A, ALICE, AND, ATE, BECAME ,BECAUSE ,BUT ,CLOSED ,COMMA", 
"= \"number\" OF = \"of\" OPENED = \"opened\" OR =", "UNSURE = \"unsure\" WAS = \"was\" WHAT = \"what\" WHICH", "= \"hatta\" LETTER = \"letter\" LOOKING_GLASS = \"looking-glass\" LPAR =", "= \",\" CONTAINED = \"contained\" DOT = \".\" DRANK =", "OPENED = \"opened\" OR = \"or\" PERHAPS = \"perhaps\" PIECE", "= \"was\" WHAT = \"what\" WHICH = \"which\" RESTRICTED =", "= \"(\" MAYBE = \"maybe\" NUMBER = \"number\" OF =", "MAYBE = \"maybe\" NUMBER = \"number\" OF = \"of\" OPENED", "THEN = \"then\" TIMES = \"times\" TOO = \"too\" UNDERSCORE", ",LOOKING_GLASS ,LPAR ,MAYBE ,NUMBER ,OF ,OPENED ,OR ,PERHAPS ,PIECE ,QUESTION", "\"too\" UNDERSCORE = \"_\" UNSURE = \"unsure\" WAS = \"was\"", "\"a\" ALICE = \"Alice\" AND = \"and\" ATE = \"ate\"", "\"became\" BECAUSE = \"because\" BUT = \"but\" CLOSED = \"closed\"", "= \"eventually\" FOUND = \"found\" HAD = \"had\" HATTA =", "= \"Alice\" AND = \"and\" ATE = \"ate\" BECAME =", ",SPOKE ,THE ,THEN ,TIMES ,TOO ,UNDERSCORE ,UNSURE ,WAS ,WHAT ,WHICH]", "= \"because\" BUT = \"but\" CLOSED = \"closed\" COMMA =", "TIMES = \"times\" TOO = \"too\" UNDERSCORE = \"_\" UNSURE", "= \"of\" OPENED = \"opened\" OR = \"or\" PERHAPS =", "\"but\" CLOSED = \"closed\" COMMA = \",\" CONTAINED = \"contained\"", ",NUMBER ,OF ,OPENED ,OR ,PERHAPS ,PIECE ,QUESTION ,ROOM ,RPAR ,S", "AND, ATE, BECAME ,BECAUSE ,BUT ,CLOSED ,COMMA ,CONTAINED ,DOT ,DRANK", "= \"drank\" EITHER = \"either\" ENOUGH = \"enough\" EVENTUALLY =", "\"opened\" OR = \"or\" PERHAPS = \"perhaps\" PIECE = \"piece\"", ",BECAUSE ,BUT ,CLOSED ,COMMA ,CONTAINED ,DOT ,DRANK ,EITHER ,ENOUGH ,EVENTUALLY", "\"of\" OPENED = \"opened\" OR = \"or\" PERHAPS = \"perhaps\"", ",CONTAINED ,DOT ,DRANK ,EITHER ,ENOUGH ,EVENTUALLY ,FOUND ,HAD ,HATTA ,LETTER", "SENTENCE = \"sentence\" SO = \"so\" SPIDER = \"spider\" SPOKE", "= \"piece\" QUESTION = \"?\" ROOM = \"room\" RPAR =", "= \"enough\" EVENTUALLY = \"eventually\" FOUND = \"found\" HAD =", "THE = \"The\" THEN = \"then\" TIMES = \"times\" TOO", 
"\"perhaps\" PIECE = \"piece\" QUESTION = \"?\" ROOM = \"room\"", "= [ A, ALICE, AND, ATE, BECAME ,BECAUSE ,BUT ,CLOSED", "WHICH = \"which\" RESTRICTED = [ A, ALICE, AND, ATE,", "All the reserved, individual words used in MAlice. ''' A", "= \"unsure\" WAS = \"was\" WHAT = \"what\" WHICH =", "ENOUGH = \"enough\" EVENTUALLY = \"eventually\" FOUND = \"found\" HAD", "\"The\" THEN = \"then\" TIMES = \"times\" TOO = \"too\"", ",CLOSED ,COMMA ,CONTAINED ,DOT ,DRANK ,EITHER ,ENOUGH ,EVENTUALLY ,FOUND ,HAD", "DOT = \".\" DRANK = \"drank\" EITHER = \"either\" ENOUGH", "\"closed\" COMMA = \",\" CONTAINED = \"contained\" DOT = \".\"", "\"'s\" SAID = \"said\" SENTENCE = \"sentence\" SO = \"so\"", "PERHAPS = \"perhaps\" PIECE = \"piece\" QUESTION = \"?\" ROOM", "\"times\" TOO = \"too\" UNDERSCORE = \"_\" UNSURE = \"unsure\"", "A = \"a\" ALICE = \"Alice\" AND = \"and\" ATE", "in MAlice. ''' A = \"a\" ALICE = \"Alice\" AND", "= \"?\" ROOM = \"room\" RPAR = \")\" S =", "\"ate\" BECAME = \"became\" BECAUSE = \"because\" BUT = \"but\"", "= \"letter\" LOOKING_GLASS = \"looking-glass\" LPAR = \"(\" MAYBE =", ",LETTER ,LOOKING_GLASS ,LPAR ,MAYBE ,NUMBER ,OF ,OPENED ,OR ,PERHAPS ,PIECE", ",LPAR ,MAYBE ,NUMBER ,OF ,OPENED ,OR ,PERHAPS ,PIECE ,QUESTION ,ROOM", ",QUESTION ,ROOM ,RPAR ,S ,SAID, SENTENCE ,SO ,SPIDER ,SPOKE ,THE", "COMMA = \",\" CONTAINED = \"contained\" DOT = \".\" DRANK", "= \"and\" ATE = \"ate\" BECAME = \"became\" BECAUSE =", "\"was\" WHAT = \"what\" WHICH = \"which\" RESTRICTED = [", "= \"_\" UNSURE = \"unsure\" WAS = \"was\" WHAT =", "reserved, individual words used in MAlice. ''' A = \"a\"", "= \"contained\" DOT = \".\" DRANK = \"drank\" EITHER =", "S = \"'s\" SAID = \"said\" SENTENCE = \"sentence\" SO", "SPIDER = \"spider\" SPOKE = \"spoke\" THE = \"The\" THEN", "= \"closed\" COMMA = \",\" CONTAINED = \"contained\" DOT =" ]
[ "python3 # Author: C.K # Email: <EMAIL> # DateTime:2021-04-12 18:35:15", "newInterval[0]: res.append(interval) elif interval[0] > newInterval[1]: res.append(newInterval) newInterval = interval", "intervals: List[List[int]], newInterval: List[int]) -> List[List[int]]: res, i = [],", "min(interval[0], newInterval[0]), max(interval[1], newInterval[1]) ] res.append(newInterval) return res if __name__", "# Email: <EMAIL> # DateTime:2021-04-12 18:35:15 # Description: import os", "List[List[int]], newInterval: List[int]) -> List[List[int]]: res, i = [], 0", "= [], 0 for interval in intervals: if interval[1] <", "interval elif interval[1] >= newInterval[0] or newInterval[1] >= interval[0]: newInterval", "> newInterval[1]: res.append(newInterval) newInterval = interval elif interval[1] >= newInterval[0]", "class Solution: def insert(self, intervals: List[List[int]], newInterval: List[int]) -> List[List[int]]:", "Solution: def insert(self, intervals: List[List[int]], newInterval: List[int]) -> List[List[int]]: res,", "DateTime:2021-04-12 18:35:15 # Description: import os import sys class Solution:", "res, i = [], 0 for interval in intervals: if", "C.K # Email: <EMAIL> # DateTime:2021-04-12 18:35:15 # Description: import", "newInterval[1]: res.append(newInterval) newInterval = interval elif interval[1] >= newInterval[0] or", "for interval in intervals: if interval[1] < newInterval[0]: res.append(interval) elif", "newInterval[0] or newInterval[1] >= interval[0]: newInterval = [ min(interval[0], newInterval[0]),", "newInterval[1] >= interval[0]: newInterval = [ min(interval[0], newInterval[0]), max(interval[1], newInterval[1])", "[ min(interval[0], newInterval[0]), max(interval[1], newInterval[1]) ] res.append(newInterval) return res if", "-> List[List[int]]: res, i = [], 0 for interval in", "= [ min(interval[0], newInterval[0]), max(interval[1], newInterval[1]) ] res.append(newInterval) return res", "i = [], 0 for interval in intervals: if interval[1]", "newInterval[1]) ] 
res.append(newInterval) return res if __name__ == \"__main__\": pass", "res.append(interval) elif interval[0] > newInterval[1]: res.append(newInterval) newInterval = interval elif", "interval[1] < newInterval[0]: res.append(interval) elif interval[0] > newInterval[1]: res.append(newInterval) newInterval", "max(interval[1], newInterval[1]) ] res.append(newInterval) return res if __name__ == \"__main__\":", "os import sys class Solution: def insert(self, intervals: List[List[int]], newInterval:", "res.append(newInterval) newInterval = interval elif interval[1] >= newInterval[0] or newInterval[1]", "Description: import os import sys class Solution: def insert(self, intervals:", "newInterval = [ min(interval[0], newInterval[0]), max(interval[1], newInterval[1]) ] res.append(newInterval) return", "elif interval[1] >= newInterval[0] or newInterval[1] >= interval[0]: newInterval =", "# DateTime:2021-04-12 18:35:15 # Description: import os import sys class", "if interval[1] < newInterval[0]: res.append(interval) elif interval[0] > newInterval[1]: res.append(newInterval)", "newInterval[0]), max(interval[1], newInterval[1]) ] res.append(newInterval) return res if __name__ ==", "in intervals: if interval[1] < newInterval[0]: res.append(interval) elif interval[0] >", "or newInterval[1] >= interval[0]: newInterval = [ min(interval[0], newInterval[0]), max(interval[1],", "Email: <EMAIL> # DateTime:2021-04-12 18:35:15 # Description: import os import", "# Description: import os import sys class Solution: def insert(self,", "import sys class Solution: def insert(self, intervals: List[List[int]], newInterval: List[int])", "newInterval: List[int]) -> List[List[int]]: res, i = [], 0 for", "<EMAIL> # DateTime:2021-04-12 18:35:15 # Description: import os import sys", "< newInterval[0]: res.append(interval) elif interval[0] > newInterval[1]: res.append(newInterval) newInterval =", "= interval elif interval[1] >= newInterval[0] or newInterval[1] >= interval[0]:", "# !/usr/bin/env python3 
# Author: C.K # Email: <EMAIL> #", ">= interval[0]: newInterval = [ min(interval[0], newInterval[0]), max(interval[1], newInterval[1]) ]", "interval[0] > newInterval[1]: res.append(newInterval) newInterval = interval elif interval[1] >=", "interval[0]: newInterval = [ min(interval[0], newInterval[0]), max(interval[1], newInterval[1]) ] res.append(newInterval)", "interval[1] >= newInterval[0] or newInterval[1] >= interval[0]: newInterval = [", "[], 0 for interval in intervals: if interval[1] < newInterval[0]:", "elif interval[0] > newInterval[1]: res.append(newInterval) newInterval = interval elif interval[1]", "newInterval = interval elif interval[1] >= newInterval[0] or newInterval[1] >=", ">= newInterval[0] or newInterval[1] >= interval[0]: newInterval = [ min(interval[0],", "# Author: C.K # Email: <EMAIL> # DateTime:2021-04-12 18:35:15 #", "sys class Solution: def insert(self, intervals: List[List[int]], newInterval: List[int]) ->", "!/usr/bin/env python3 # Author: C.K # Email: <EMAIL> # DateTime:2021-04-12", "insert(self, intervals: List[List[int]], newInterval: List[int]) -> List[List[int]]: res, i =", "0 for interval in intervals: if interval[1] < newInterval[0]: res.append(interval)", "interval in intervals: if interval[1] < newInterval[0]: res.append(interval) elif interval[0]", "Author: C.K # Email: <EMAIL> # DateTime:2021-04-12 18:35:15 # Description:", "List[List[int]]: res, i = [], 0 for interval in intervals:", "intervals: if interval[1] < newInterval[0]: res.append(interval) elif interval[0] > newInterval[1]:", "List[int]) -> List[List[int]]: res, i = [], 0 for interval", "18:35:15 # Description: import os import sys class Solution: def", "import os import sys class Solution: def insert(self, intervals: List[List[int]],", "def insert(self, intervals: List[List[int]], newInterval: List[int]) -> List[List[int]]: res, i" ]
[ "contributors. All rights reserved. # License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)", "pyMOR project (http://www.pymor.org). # Copyright 2013-2020 pyMOR developers and contributors.", "(http://www.pymor.org). # Copyright 2013-2020 pyMOR developers and contributors. All rights", "License (http://opensource.org/licenses/BSD-2-Clause) from pymortests.base import runmodule if __name__ == \"__main__\":", "and contributors. All rights reserved. # License: BSD 2-Clause License", "# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause) from pymortests.base import runmodule", "All rights reserved. # License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause) from", "of the pyMOR project (http://www.pymor.org). # Copyright 2013-2020 pyMOR developers", "part of the pyMOR project (http://www.pymor.org). # Copyright 2013-2020 pyMOR", "is part of the pyMOR project (http://www.pymor.org). # Copyright 2013-2020", "This file is part of the pyMOR project (http://www.pymor.org). #", "# Copyright 2013-2020 pyMOR developers and contributors. All rights reserved.", "License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause) from pymortests.base import runmodule if", "Copyright 2013-2020 pyMOR developers and contributors. All rights reserved. #", "the pyMOR project (http://www.pymor.org). # Copyright 2013-2020 pyMOR developers and", "# This file is part of the pyMOR project (http://www.pymor.org).", "reserved. # License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause) from pymortests.base import", "pyMOR developers and contributors. All rights reserved. # License: BSD", "BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause) from pymortests.base import runmodule if __name__", "developers and contributors. All rights reserved. 
# License: BSD 2-Clause", "2-Clause License (http://opensource.org/licenses/BSD-2-Clause) from pymortests.base import runmodule if __name__ ==", "2013-2020 pyMOR developers and contributors. All rights reserved. # License:", "rights reserved. # License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause) from pymortests.base", "project (http://www.pymor.org). # Copyright 2013-2020 pyMOR developers and contributors. All", "file is part of the pyMOR project (http://www.pymor.org). # Copyright", "(http://opensource.org/licenses/BSD-2-Clause) from pymortests.base import runmodule if __name__ == \"__main__\": runmodule(filename=__file__)" ]
[ ".bucket import SyncStorageBucketAPI from .file_api import SyncBucketProxy __all__ = [", "-> SyncBucketProxy: \"\"\"Run a storage file operation. Parameters ---------- id", "The unique identifier of the bucket \"\"\" return SyncBucketProxy(id, self.url,", "and files.\"\"\" def __init__(self, url: str, headers: dict[str, str]) ->", "storage buckets and files.\"\"\" def __init__(self, url: str, headers: dict[str,", "\"\"\"Manage storage buckets and files.\"\"\" def __init__(self, url: str, headers:", "super().__init__( url, {\"User-Agent\": f\"supabase-py/storage3 v{__version__}\", **headers}, SyncClient(), ) def from_(self,", "<reponame>anand2312/storage-py<gh_stars>0 from ..utils import SyncClient, __version__ from .bucket import SyncStorageBucketAPI", "__init__(self, url: str, headers: dict[str, str]) -> None: super().__init__( url,", "**headers}, SyncClient(), ) def from_(self, id: str) -> SyncBucketProxy: \"\"\"Run", "import SyncBucketProxy __all__ = [ \"SyncStorageClient\", ] class SyncStorageClient(SyncStorageBucketAPI): \"\"\"Manage", "SyncBucketProxy: \"\"\"Run a storage file operation. Parameters ---------- id The", "identifier of the bucket \"\"\" return SyncBucketProxy(id, self.url, self.headers, self._client)", "from_(self, id: str) -> SyncBucketProxy: \"\"\"Run a storage file operation.", "url, {\"User-Agent\": f\"supabase-py/storage3 v{__version__}\", **headers}, SyncClient(), ) def from_(self, id:", "from .bucket import SyncStorageBucketAPI from .file_api import SyncBucketProxy __all__ =", "\"\"\"Run a storage file operation. 
Parameters ---------- id The unique", "---------- id The unique identifier of the bucket \"\"\" return", "{\"User-Agent\": f\"supabase-py/storage3 v{__version__}\", **headers}, SyncClient(), ) def from_(self, id: str)", "SyncBucketProxy __all__ = [ \"SyncStorageClient\", ] class SyncStorageClient(SyncStorageBucketAPI): \"\"\"Manage storage", "buckets and files.\"\"\" def __init__(self, url: str, headers: dict[str, str])", "SyncStorageClient(SyncStorageBucketAPI): \"\"\"Manage storage buckets and files.\"\"\" def __init__(self, url: str,", "None: super().__init__( url, {\"User-Agent\": f\"supabase-py/storage3 v{__version__}\", **headers}, SyncClient(), ) def", "def from_(self, id: str) -> SyncBucketProxy: \"\"\"Run a storage file", "\"SyncStorageClient\", ] class SyncStorageClient(SyncStorageBucketAPI): \"\"\"Manage storage buckets and files.\"\"\" def", "] class SyncStorageClient(SyncStorageBucketAPI): \"\"\"Manage storage buckets and files.\"\"\" def __init__(self,", "files.\"\"\" def __init__(self, url: str, headers: dict[str, str]) -> None:", "unique identifier of the bucket \"\"\" return SyncBucketProxy(id, self.url, self.headers,", "SyncClient, __version__ from .bucket import SyncStorageBucketAPI from .file_api import SyncBucketProxy", "str]) -> None: super().__init__( url, {\"User-Agent\": f\"supabase-py/storage3 v{__version__}\", **headers}, SyncClient(),", "v{__version__}\", **headers}, SyncClient(), ) def from_(self, id: str) -> SyncBucketProxy:", "SyncClient(), ) def from_(self, id: str) -> SyncBucketProxy: \"\"\"Run a", "import SyncClient, __version__ from .bucket import SyncStorageBucketAPI from .file_api import", "class SyncStorageClient(SyncStorageBucketAPI): \"\"\"Manage storage buckets and files.\"\"\" def __init__(self, url:", "= [ \"SyncStorageClient\", ] class SyncStorageClient(SyncStorageBucketAPI): \"\"\"Manage storage buckets and", "-> None: super().__init__( url, {\"User-Agent\": f\"supabase-py/storage3 v{__version__}\", **headers}, 
SyncClient(), )", ") def from_(self, id: str) -> SyncBucketProxy: \"\"\"Run a storage", "..utils import SyncClient, __version__ from .bucket import SyncStorageBucketAPI from .file_api", "SyncStorageBucketAPI from .file_api import SyncBucketProxy __all__ = [ \"SyncStorageClient\", ]", "import SyncStorageBucketAPI from .file_api import SyncBucketProxy __all__ = [ \"SyncStorageClient\",", "def __init__(self, url: str, headers: dict[str, str]) -> None: super().__init__(", "str, headers: dict[str, str]) -> None: super().__init__( url, {\"User-Agent\": f\"supabase-py/storage3", "str) -> SyncBucketProxy: \"\"\"Run a storage file operation. Parameters ----------", "headers: dict[str, str]) -> None: super().__init__( url, {\"User-Agent\": f\"supabase-py/storage3 v{__version__}\",", "from ..utils import SyncClient, __version__ from .bucket import SyncStorageBucketAPI from", "from .file_api import SyncBucketProxy __all__ = [ \"SyncStorageClient\", ] class", "f\"supabase-py/storage3 v{__version__}\", **headers}, SyncClient(), ) def from_(self, id: str) ->", "file operation. Parameters ---------- id The unique identifier of the", "operation. Parameters ---------- id The unique identifier of the bucket", "a storage file operation. 
Parameters ---------- id The unique identifier", "url: str, headers: dict[str, str]) -> None: super().__init__( url, {\"User-Agent\":", "id The unique identifier of the bucket \"\"\" return SyncBucketProxy(id,", "Parameters ---------- id The unique identifier of the bucket \"\"\"", "dict[str, str]) -> None: super().__init__( url, {\"User-Agent\": f\"supabase-py/storage3 v{__version__}\", **headers},", "__all__ = [ \"SyncStorageClient\", ] class SyncStorageClient(SyncStorageBucketAPI): \"\"\"Manage storage buckets", ".file_api import SyncBucketProxy __all__ = [ \"SyncStorageClient\", ] class SyncStorageClient(SyncStorageBucketAPI):", "[ \"SyncStorageClient\", ] class SyncStorageClient(SyncStorageBucketAPI): \"\"\"Manage storage buckets and files.\"\"\"", "__version__ from .bucket import SyncStorageBucketAPI from .file_api import SyncBucketProxy __all__", "id: str) -> SyncBucketProxy: \"\"\"Run a storage file operation. Parameters", "storage file operation. Parameters ---------- id The unique identifier of" ]
[ "furnished to do so, subject # to the following conditions:", "conditions: # # The above copyright notice and this permission", "this permission notice shall be included in all copies or", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "pyjsparser import PyJsParser, Node, WrappingNode, node_to_dict from translator import translate_js,", "publish, distribute, sublicense, and/or sell copies of # the Software,", "Syntax tree has the same structure as syntax tree produced", "syntax tree produced by esprima.js Same as PyJsParser().parse For your", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN", "the Software, and to permit persons to whom the Software", "from pyjsparser import PyJsParser, Node, WrappingNode, node_to_dict from translator import", "parse(javascript_code): \"\"\"Returns syntax tree of javascript_code. Syntax tree has the", "above copyright notice and this permission notice shall be included", "this software and associated documentation files (the 'Software'), # to", "limitation the rights # to use, copy, modify, merge, publish,", "Software. # # THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
# IN NO", "SOFTWARE __all__ = ['PyJsParser', 'Node', 'WrappingNode', 'node_to_dict', 'parse', 'translate_js', 'translate',", "is furnished to do so, subject # to the following", "in all copies or # substantial portions of the Software.", "# # Copyright 2014, 2015 <NAME> # # Permission is", "to whom the Software is furnished to do so, subject", "without limitation the rights # to use, copy, modify, merge,", "LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR", "charge, to any person obtaining # a copy of this", "# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE", "obtaining # a copy of this software and associated documentation", "SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,", "produced by esprima.js Same as PyJsParser().parse For your convenience :)", "javascript_code. Syntax tree has the same structure as syntax tree", "do so, subject # to the following conditions: # #", "__author__ = '<NAME>' __version__ = '2.2.0' from pyjsparser import PyJsParser,", "rights # to use, copy, modify, merge, publish, distribute, sublicense,", "# a copy of this software and associated documentation files", "# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF", "WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT", "trasnlate, syntax_tree_translate, DEFAULT_HEADER def parse(javascript_code): \"\"\"Returns syntax tree of javascript_code.", "free of charge, to any person obtaining # a copy", "OR IN CONNECTION WITH THE SOFTWARE # OR THE USE", "IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING", "DEALINGS IN THE SOFTWARE __all__ = ['PyJsParser', 'Node', 'WrappingNode', 'node_to_dict',", "distribute, sublicense, and/or sell copies of # the Software, and", "the rights # to use, copy, modify, merge, publish, distribute,", "# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF", "THE USE OR OTHER DEALINGS IN THE SOFTWARE __all__ =", "'<NAME>' __version__ = '2.2.0' from pyjsparser import 
PyJsParser, Node, WrappingNode,", "as PyJsParser().parse For your convenience :) \"\"\" p = PyJsParser()", "TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE", "'Software'), # to deal in the Software without restriction, including", "Software is furnished to do so, subject # to the", "portions of the Software. # # THE SOFTWARE IS PROVIDED", "EXPRESS OR IMPLIED, INCLUDING BUT NOT # LIMITED TO THE", "'DEFAULT_HEADER'] __author__ = '<NAME>' __version__ = '2.2.0' from pyjsparser import", "The MIT License # # Copyright 2014, 2015 <NAME> #", "OF OR IN CONNECTION WITH THE SOFTWARE # OR THE", "notice and this permission notice shall be included in all", "= ['PyJsParser', 'Node', 'WrappingNode', 'node_to_dict', 'parse', 'translate_js', 'translate', 'syntax_tree_translate', 'DEFAULT_HEADER']", "is hereby granted, free of charge, to any person obtaining", "'parse', 'translate_js', 'translate', 'syntax_tree_translate', 'DEFAULT_HEADER'] __author__ = '<NAME>' __version__ =", "# # The above copyright notice and this permission notice", "files (the 'Software'), # to deal in the Software without", "OR OTHER DEALINGS IN THE SOFTWARE __all__ = ['PyJsParser', 'Node',", "subject # to the following conditions: # # The above", "of charge, to any person obtaining # a copy of", "of this software and associated documentation files (the 'Software'), #", "# to the following conditions: # # The above copyright", "'Node', 'WrappingNode', 'node_to_dict', 'parse', 'translate_js', 'translate', 'syntax_tree_translate', 'DEFAULT_HEADER'] __author__ =", "person obtaining # a copy of this software and associated", "OTHER DEALINGS IN THE SOFTWARE __all__ = ['PyJsParser', 'Node', 'WrappingNode',", "# # THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY", "PARTICULAR PURPOSE AND NONINFRINGEMENT. 
# IN NO EVENT SHALL THE", "and this permission notice shall be included in all copies", "Software without restriction, including without limitation the rights # to", "__version__ = '2.2.0' from pyjsparser import PyJsParser, Node, WrappingNode, node_to_dict", "shall be included in all copies or # substantial portions", "included in all copies or # substantial portions of the", "NOT # LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR", "THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY", "to deal in the Software without restriction, including without limitation", "AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR", "IN THE SOFTWARE __all__ = ['PyJsParser', 'Node', 'WrappingNode', 'node_to_dict', 'parse',", "WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "without restriction, including without limitation the rights # to use,", "# # Permission is hereby granted, free of charge, to", "License # # Copyright 2014, 2015 <NAME> # # Permission", "to the following conditions: # # The above copyright notice", "THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, #", "'translate_js', 'translate', 'syntax_tree_translate', 'DEFAULT_HEADER'] __author__ = '<NAME>' __version__ = '2.2.0'", "PyJsParser, Node, WrappingNode, node_to_dict from translator import translate_js, trasnlate, syntax_tree_translate,", "and/or sell copies of # the Software, and to permit", "NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT", "and associated documentation files (the 'Software'), # to deal in", "substantial portions of the Software. 
# # THE SOFTWARE IS", "FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE #", "# Copyright 2014, 2015 <NAME> # # Permission is hereby", "copy of this software and associated documentation files (the 'Software'),", "permit persons to whom the Software is furnished to do", "associated documentation files (the 'Software'), # to deal in the", "WrappingNode, node_to_dict from translator import translate_js, trasnlate, syntax_tree_translate, DEFAULT_HEADER def", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT", "to any person obtaining # a copy of this software", "translator import translate_js, trasnlate, syntax_tree_translate, DEFAULT_HEADER def parse(javascript_code): \"\"\"Returns syntax", "hereby granted, free of charge, to any person obtaining #", "in the Software without restriction, including without limitation the rights", "= '2.2.0' from pyjsparser import PyJsParser, Node, WrappingNode, node_to_dict from", "of # the Software, and to permit persons to whom", "'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,", "all copies or # substantial portions of the Software. #", "of the Software. 
# # THE SOFTWARE IS PROVIDED 'AS", "OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE", "sell copies of # the Software, and to permit persons", "OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT #", "the Software is furnished to do so, subject # to", "DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF", "'WrappingNode', 'node_to_dict', 'parse', 'translate_js', 'translate', 'syntax_tree_translate', 'DEFAULT_HEADER'] __author__ = '<NAME>'", "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE", "translate_js, trasnlate, syntax_tree_translate, DEFAULT_HEADER def parse(javascript_code): \"\"\"Returns syntax tree of", "MIT License # # Copyright 2014, 2015 <NAME> # #", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT", "Software, and to permit persons to whom the Software is", "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN", "Same as PyJsParser().parse For your convenience :) \"\"\" p =", "PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS", "ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN", "THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "IMPLIED, INCLUDING BUT NOT # LIMITED TO THE WARRANTIES OF", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or", "restriction, including without limitation the rights # to use, copy,", "SOFTWARE # OR THE USE OR OTHER DEALINGS IN THE", "ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE", "syntax tree of javascript_code. Syntax tree has the same structure", "syntax_tree_translate, DEFAULT_HEADER def parse(javascript_code): \"\"\"Returns syntax tree of javascript_code. 
Syntax", "persons to whom the Software is furnished to do so,", "any person obtaining # a copy of this software and", "CONNECTION WITH THE SOFTWARE # OR THE USE OR OTHER", "OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH", "'node_to_dict', 'parse', 'translate_js', 'translate', 'syntax_tree_translate', 'DEFAULT_HEADER'] __author__ = '<NAME>' __version__", "ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT # LIMITED", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,", "whom the Software is furnished to do so, subject #", "including without limitation the rights # to use, copy, modify,", "PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "the same structure as syntax tree produced by esprima.js Same", "copyright notice and this permission notice shall be included in", "has the same structure as syntax tree produced by esprima.js", "documentation files (the 'Software'), # to deal in the Software", "modify, merge, publish, distribute, sublicense, and/or sell copies of #", "of javascript_code. Syntax tree has the same structure as syntax", "2015 <NAME> # # Permission is hereby granted, free of", "# the Software, and to permit persons to whom the", "be included in all copies or # substantial portions of", "software and associated documentation files (the 'Software'), # to deal", "granted, free of charge, to any person obtaining # a", "structure as syntax tree produced by esprima.js Same as PyJsParser().parse", "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION", "THE SOFTWARE __all__ = ['PyJsParser', 'Node', 'WrappingNode', 'node_to_dict', 'parse', 'translate_js',", "OR THE USE OR OTHER DEALINGS IN THE SOFTWARE __all__", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT", "OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
#", "and to permit persons to whom the Software is furnished", "FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN", "# Permission is hereby granted, free of charge, to any", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "<NAME> # # Permission is hereby granted, free of charge,", "# substantial portions of the Software. # # THE SOFTWARE", "CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION", "'2.2.0' from pyjsparser import PyJsParser, Node, WrappingNode, node_to_dict from translator", "SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "the Software without restriction, including without limitation the rights #", "node_to_dict from translator import translate_js, trasnlate, syntax_tree_translate, DEFAULT_HEADER def parse(javascript_code):", "# The MIT License # # Copyright 2014, 2015 <NAME>", "PyJsParser().parse For your convenience :) \"\"\" p = PyJsParser() return", "NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "'translate', 'syntax_tree_translate', 'DEFAULT_HEADER'] __author__ = '<NAME>' __version__ = '2.2.0' from", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies", "so, subject # to the following conditions: # # The", "(the 'Software'), # to deal in the Software without restriction,", "merge, publish, distribute, sublicense, and/or sell copies of # the", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL", "= '<NAME>' __version__ = '2.2.0' from pyjsparser import PyJsParser, Node,", "USE OR OTHER DEALINGS IN THE SOFTWARE __all__ = ['PyJsParser',", "by esprima.js Same as PyJsParser().parse For your convenience :) \"\"\"", "to permit persons to whom the Software is furnished to", "same structure as syntax tree produced by esprima.js Same as", "the Software. 
# # THE SOFTWARE IS PROVIDED 'AS IS',", "permission notice shall be included in all copies or #", "2014, 2015 <NAME> # # Permission is hereby granted, free", "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of", "notice shall be included in all copies or # substantial", "BUT NOT # LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS", "Permission is hereby granted, free of charge, to any person", "IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS", "'syntax_tree_translate', 'DEFAULT_HEADER'] __author__ = '<NAME>' __version__ = '2.2.0' from pyjsparser", "__all__ = ['PyJsParser', 'Node', 'WrappingNode', 'node_to_dict', 'parse', 'translate_js', 'translate', 'syntax_tree_translate',", "# The above copyright notice and this permission notice shall", "KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT # LIMITED TO", "OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT,", "['PyJsParser', 'Node', 'WrappingNode', 'node_to_dict', 'parse', 'translate_js', 'translate', 'syntax_tree_translate', 'DEFAULT_HEADER'] __author__", "OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT", "Copyright 2014, 2015 <NAME> # # Permission is hereby granted,", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "a copy of this software and associated documentation files (the", "Node, WrappingNode, node_to_dict from translator import translate_js, trasnlate, syntax_tree_translate, DEFAULT_HEADER", "from translator import translate_js, trasnlate, syntax_tree_translate, DEFAULT_HEADER def parse(javascript_code): \"\"\"Returns", "as syntax tree produced by esprima.js Same as PyJsParser().parse For", "deal in the Software without restriction, including without limitation the", "The above copyright notice and this permission notice shall be", "EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "OR IMPLIED, INCLUDING BUT NOT # LIMITED TO THE WARRANTIES", "OUT OF OR IN CONNECTION WITH THE SOFTWARE # OR", "tree has the same structure as syntax tree 
produced by", "tree produced by esprima.js Same as PyJsParser().parse For your convenience", "WITH THE SOFTWARE # OR THE USE OR OTHER DEALINGS", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER", "or # substantial portions of the Software. # # THE", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR", "For your convenience :) \"\"\" p = PyJsParser() return p.parse(javascript_code)", "# to deal in the Software without restriction, including without", "sublicense, and/or sell copies of # the Software, and to", "tree of javascript_code. Syntax tree has the same structure as", "copies or # substantial portions of the Software. # #", "import translate_js, trasnlate, syntax_tree_translate, DEFAULT_HEADER def parse(javascript_code): \"\"\"Returns syntax tree", "esprima.js Same as PyJsParser().parse For your convenience :) \"\"\" p", "# LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A", "# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS", "def parse(javascript_code): \"\"\"Returns syntax tree of javascript_code. Syntax tree has", "DEFAULT_HEADER def parse(javascript_code): \"\"\"Returns syntax tree of javascript_code. Syntax tree", "the following conditions: # # The above copyright notice and", "\"\"\"Returns syntax tree of javascript_code. Syntax tree has the same", "THE SOFTWARE # OR THE USE OR OTHER DEALINGS IN", "to do so, subject # to the following conditions: #", "copies of # the Software, and to permit persons to", "import PyJsParser, Node, WrappingNode, node_to_dict from translator import translate_js, trasnlate,", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "following conditions: # # The above copyright notice and this", "INCLUDING BUT NOT # LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "IN CONNECTION WITH THE SOFTWARE # OR THE USE OR", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING" ]
[ "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68), 2, 13, None, NamedColors.red.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A), 2, 5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "9, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65), 2, 10, None,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A), 1, 15, 20, None,", "self.assertEqual(indent, pac.get_indent()) self.assertEqual(color, pac.get_color()) self.assertEqual(font_style, pac.get_font_style()) self.assertEqual(text_decoration, pac.get_text_decoration()) def test_scc_pac_white(self):", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56), 2, 1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76),", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58), 2, 11, 16, None,", "11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51), 1, 12, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B), 2, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B),", "0x45), 2, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65), 2,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67), 2, 10, None, NamedColors.cyan.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F), 1, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F),", "0, None, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50), 1, 14, 0, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A), 1, 13, 20, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45), 2, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F), 2, 9, 28, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48), 1, 11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None) def test_scc_pac_red_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49), 1, 1, None, NamedColors.red.value, None,", "0x57), 1, 3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77), 1,", "0x75), 1, 15, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55), 2,", "0x4B), 1, 12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B), 1,", "6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55), 2, 7, 8,", "0x4A), 2, 5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A), 2,", "2, 7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64), 2, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45), 1, 11, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47), 2, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "0x6C), 1, 8, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C), 1,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68), 1, 15, None, NamedColors.red.value, None,", "0x78), 2, 4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58), 2,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68), 2, 10, None, NamedColors.red.value,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67), 1, 15, None, NamedColors.cyan.value,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B), 1, 3, None, NamedColors.yellow.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65), 1, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) def test_scc_pac_red(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48), 1,", "1, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49), 1, 12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41), 2, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E), 1, 12, 28, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7E),", "NamedColors.white.value, FontStyleType.italic, None) def test_scc_pac_white_italics_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F), 1, 1, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x43), 2, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B), 2, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E), 1, 5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E),", "0x4E), 1, 12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E), 1,", "2, 1, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69), 2, 2,", "2, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65), 2, 15,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E), 2, 4, None, NamedColors.white.value, FontStyleType.italic, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56), 2, 3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E), 1, 3, None, NamedColors.white.value, FontStyleType.italic, None)", "5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E), 1, 6, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62), 2, 8, None, NamedColors.green.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50), 1, 9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70),", "2, 6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53), 2, 7,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42), 1, 11, None, NamedColors.green.value,", "13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47), 1, 14, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78), 1, 2, 16, None, None,", "0x59), 1, 7, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62), 1, 2, None, NamedColors.green.value, None, None)", "2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44), 1, 3, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D), 2, 11, 24, None, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A), 2, 4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B), 2, 10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "0x6F), 2, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F), 2,", "b2)) for b1 in channel_2_byte_1: for b2 in byte_2_range: pac", "1, 2, 20, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B), 1, 3,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49), 1, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47), 2, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45), 2, 11, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45),", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D), 1, 11, None, NamedColors.magenta.value, None,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E), 2, 3, None, NamedColors.white.value, FontStyleType.italic, None)", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62), 1, 13, None, NamedColors.green.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60), 2, 6, None, NamedColors.white.value, None, None)", "1, 14, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C), 1, 15,", "source code must retain the above copyright notice, this #", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66), 2, 2, None, NamedColors.cyan.value, None, None)", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63), 1, 4, None, NamedColors.green.value,", "2, 13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46), 2, 14,", "12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57), 2, 5, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71), 1, 4, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51),", "0x7B), 1, 4, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B), 1,", "1, 4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52), 1, 5,", "15, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53), 2, 1, 4,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D), 2, 6, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68), 2, 15, None, NamedColors.red.value,", "0x72), 1, 10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68), 1, 10, None, NamedColors.red.value, None, None)", "5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E), 1, 6, None,", "2, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49), 2, 7,", "7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F), 1, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D), 1, 15, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D),", "1, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D), 1, 13,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78), 2, 6, 16, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C), 2, 5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C),", "0x6B), 1, 10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52), 2, 1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72),", "6, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46), 2, 7, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61), 1, 13, None, NamedColors.white.value, None,", "3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76), 2, 4, 12,", "1, 6, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40), 1, 7,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78), 1, 8, 16, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75), 1, 15, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55),", "channel, row, indent, color, font_style, text_decoration): self.assertEqual(channel, pac.get_channel()) self.assertEqual(row, pac.get_row())", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D), 2, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B), 2, 12, 20, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B),", "2, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51), 1, 3, 0,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x66), 1, 8, None, NamedColors.cyan.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47), 2, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67),", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77), 1, 4, 12, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58), 1, 5, 16, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D), 1, 13, 24, None, None, TextDecorationType(underline=True))", "2, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49), 2, 3,", "3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6A), 1, 4, None,", "15, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51), 2, 1, 0,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53), 2, 3, 4, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71), 2, 8, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71), 2, 10, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51),", "24, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D), 1, 6, 24, None,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D), 1, 15, 24, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71), 1, 8, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B), 2, 5, None, NamedColors.yellow.value,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5C), 1, 14, 24, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B), 2, 9, 20, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73), 1, 10, 4, None, None,", "2, 4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F), 2, 5,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63), 1, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D), 1, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "0x5C), 1, 12, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75), 1, 8, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "2, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 
0x45), 2, 14,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B), 2, 9, None, NamedColors.yellow.value,", "2, 15, None, NamedColors.blue.value, None, None) def test_scc_pac_blue_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45),", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53), 2, 3, 4, None,", "7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71), 2, 8, 0,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57), 1, 14, 12, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B), 1, 10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "2, 14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A), 2, 15,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40), 2, 11, None, NamedColors.white.value, None,", "1, 14, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75), 1, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E), 1, 8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E),", "0x14] channel_2_byte_1 = [0x19, 0x1A, 0x1D, 0x1E, 0x1F, 0x18, 0x1B,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B), 2, 7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B),", "0x55), 1, 3, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75), 1,", 
"NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47), 2, 14, None, NamedColors.cyan.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B), 1, 2, 20, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42), 1, 11, None, NamedColors.green.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A), 2, 5, None, NamedColors.yellow.value, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E), 2, 3, 28, None, None,", "0x43), 1, 3, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42), 2, 5, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E), 2, 4, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72), 2, 10, 4, None, None,", "0x64), 1, 10, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E), 2, 5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "0x5F), 2, 11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F), 2,", "3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79), 2, 4, 16,", "NamedColors.green.value, None, 
None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42), 2, 12, None, NamedColors.green.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50), 2, 3, 0, None, None, None)", "1, 11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B), 1, 12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B), 2, 6, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59), 2, 11, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62), 1, 15, None,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69), 1, 4, None, NamedColors.red.value, None,", "0x7A), 2, 6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A), 2,", "4, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51), 1, 5, 0,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E), 1, 8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42), 1, 14, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64), 2, 4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x44),", "0x7B), 1, 2, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B), 1,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B), 2, 4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "2, 11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44), 2, 12,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71), 2, 8, 0, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62), 1, 6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D), 2, 4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D),", "b2 in byte_2_range: pac = SccPreambleAddressCode.find(b1, b2) if b2 >", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65), 1, 4, None, NamedColors.blue.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62), 2, 2, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42),", "2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55), 2, 3, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D), 1, 2, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E), 1, 5, 28, None, None,", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND", "form must reproduce the above copyright notice, # this list", "10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55), 2, 11, 8,", "#!/usr/bin/env 
python # -*- coding: UTF-8 -*- # Copyright (c)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73), 1, 13, 4, None, None, TextDecorationType(underline=True))", "1, 11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40), 1, 12,", "None, TextDecorationType(underline=True)) def test_scc_pac_white_italics(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E), 1, 1, None, NamedColors.white.value,", "2, 12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62), 2, 13,", "1, 10, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C), 1, 11,", "0x76), 1, 15, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56), 2,", "0x08 == 0: # row 11 case self.assertIsNone(pac) else: self.assertIsNotNone(pac)", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F), 1, 10, None, NamedColors.white.value,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78), 2, 2, 16, None, None,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46), 2, 12, None, NamedColors.cyan.value,", "PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE,", "2, 1, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71), 2, 2,", "0x44), 2, 7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64), 2,", "2, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) def test_scc_pac_magenta(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C),", "0x6B), 2, 10, None, NamedColors.yellow.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50), 1, 7, 0, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50), 1, 9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "0x76), 2, 4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56), 2,", "5, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F), 2, 6, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60), 2, 2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58), 1, 9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "0x59), 2, 9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79), 2,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C), 2, 11, 24, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60), 2, 15, None, NamedColors.white.value, None,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E), 1, 3, None, NamedColors.white.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x78), 1, 4, 16, None, None, None)", "SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR #", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56), 1, 11, 12, None, None, None)", "0x55), 1, 14, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75), 1,", "6, 20, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B), 2, 7, 20,", "2, 2, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C), 2, 3,", "def test_scc_pac_blue_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45), 1, 1, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "0x71), 2, 8, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52), 1, 12, 4, None, None, None)", "2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F), 1, 3, 28,", "1, 5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71), 1, 6,", "9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68), 1, 10, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56), 1, 9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E), 1, 8, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x65), 2, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45),", "0x65), 1, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45), 1,", "distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72), 1, 13, 4, None, None,", "7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68), 2, 8, None,", "12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A), 2, 13, None,", "0x6E), 2, 6, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E), 2,", "5, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76), 1, 6, 12,", "1, 13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53), 1, 14,", "1, 10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40), 1, 11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58), 2, 1, 16, None, None, None)", "1, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49), 1, 14,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40), 1, 12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "0x6B), 1, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4B), 1,", "0x4F), 1, 11, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E), 2, 8, 28, None, None,", "2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56), 2, 3, 12,", "NamedColors.green.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63), 1, 15, None, NamedColors.green.value, None,", "OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL,", "0x67), 2, 15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) def test_scc_pac_red(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x6F), 1, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F), 2,", "2, 2, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54), 2, 3,", "FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT", "1, 14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71), 1, 15,", "8, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41), 1, 9, None,", "the following disclaimer in the documentation # and/or other materials", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67), 2, 15, None, NamedColors.cyan.value,", "CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE)", "1, 3, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63), 1, 4,", "10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B), 1, 11, None,", "1, 8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47), 1, 9,", "2, 1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72), 2, 2,", "0x5E), 1, 11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E), 1,", 
"NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47), 1, 9, None, NamedColors.cyan.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79), 1, 15, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B), 1, 13, None, NamedColors.yellow.value, None,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F), 1, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F), 2, 15, None,", "0x48), 2, 12, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68), 2,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49), 2, 14, None, NamedColors.red.value, None,", "1, 3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6A), 1, 4,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65), 2, 13, None, NamedColors.blue.value, None,", "3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74), 1, 4, 8,", "0x76), 2, 6, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56), 2,", "5, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66), 2, 6, None,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67), 2, 4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41), 2, 7, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60), 1, 4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40),", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C), 1, 5, 24, None,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C), 2, 11, None, NamedColors.magenta.value,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C), 2, 10, None, NamedColors.magenta.value,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70), 2, 15, 0, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44), 1, 5, None, NamedColors.blue.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48), 2, 7, None, NamedColors.red.value, None, None)", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A), 1, 7, None, NamedColors.yellow.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C), 2, 14, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58), 1, 5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A), 1, 7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A),", "0x6A), 2, 15, None, 
NamedColors.yellow.value, None, None) def test_scc_pac_yellow_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4C), 1, 3, None, NamedColors.magenta.value, None, None)", "1, 4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58), 1, 5,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58), 1, 11, 16, None, None,", "0x4F), 1, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60), 2, 15, None, NamedColors.white.value, None, None) def test_scc_pac_white_underline(self):", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A), 2, 1, 20, None, None, None)", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46), 1, 12, None, NamedColors.cyan.value,", "SccPreambleAddressCode.find(b1, b2) if b2 > 0x5F and b1 % 0x08", "0x54), 1, 3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F), 2, 10, 28, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49), 1, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69),", "11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43), 1, 12, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7A), 2, 2, 20, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55), 1, 12, 8, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63), 1, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "0x40), 1, 5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60), 1,", "0x5B), 1, 5, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B), 1,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A), 2, 14, None, NamedColors.yellow.value,", "above copyright notice, this # list of conditions and the", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40), 2, 11, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61), 1, 8, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75), 1, 4, 8, None, None,", "1, 5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C), 1, 6,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E), 2, 3, None, NamedColors.white.value, FontStyleType.italic,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A), 2, 12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40), 1, 9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60),", "4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D), 1, 5, 
24,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77), 1, 2, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75), 1, 6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60), 2, 6, None, NamedColors.white.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55), 1, 7, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75), 2, 4, 8, None, None, TextDecorationType(underline=True))", "0x56), 1, 1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76), 1,", "pac.get_channel()) self.assertEqual(row, pac.get_row()) self.assertEqual(indent, pac.get_indent()) self.assertEqual(color, pac.get_color()) self.assertEqual(font_style, pac.get_font_style()) self.assertEqual(text_decoration,", "0x5F), 1, 9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D), 2, 2, 24, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68), 1, 4, None, NamedColors.red.value, None, None)", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57), 1, 11, 12, None, None,", "12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57), 2, 9, 12, None,", "4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54), 2, 5, 8,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F), 1, 7, None, NamedColors.white.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41), 2, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "2, 3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D), 2, 4,", "0x71), 1, 2, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F), 1, 10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66), 1, 10, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B), 1, 4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60), 2, 4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40),", "14, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79), 2, 15, 16,", "1, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41), 1, 11,", "None, NamedColors.white.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41), 1, 14, None, NamedColors.white.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66), 1, 13, None, NamedColors.cyan.value, None, None)", "0x42), 1, 14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62), 1,", "1, 12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66), 1, 13,", "None) def test_scc_pac_cyan_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47), 1, 1, None, NamedColors.cyan.value, None,", "None) def test_scc_pac_indent_12_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57), 1, 1, 12, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F), 1, 10, 28, None, None, TextDecorationType(underline=True))", "0x58), 2, 1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F), 2, 6, 28, None, None,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E), 1, 2, 28, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75), 1, 4, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55),", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F), 2, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67), 1, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "that the following conditions are met: # # 1. 
Redistributions", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E), 1, 5, 28, None, None, None)", "7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B), 1, 8, None,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C), 2, 9, None, NamedColors.magenta.value,", "TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF", "1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66), 2, 2, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40), 2, 1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44), 2, 3, None, NamedColors.blue.value, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74), 1, 15, 8, None, None,", "2, 13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58), 2, 14,", "0x56), 1, 11, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56), 1,", "0x47), 1, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67), 1,", "0x5A), 1, 7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A), 2, 11, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74), 2, 2, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54),", "8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49), 1, 9, 
None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D), 2, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D),", "0x16, 0x17, 0x10, 0x13, 0x14] channel_2_byte_1 = [0x19, 0x1A, 0x1D,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D), 1, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "3, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62), 2, 4, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61), 2, 8, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72), 1, 4, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C), 2, 4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58), 2, 14, 16, None, None, None)", "0x51), 2, 3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71), 2,", "5, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62), 2, 6, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C), 2, 10, None, NamedColors.magenta.value, None, None)", "2, 6, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54), 2, 7,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C), 2, 5, 24, None,", "1, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69), 1, 6,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64), 1, 6, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F), 2, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) def", "2, 4, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49), 2, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65), 2, 10, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45),", "2, 6, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56), 2, 7,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70), 1, 2, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B), 1, 2, None, NamedColors.yellow.value, None,", "0x43), 2, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63), 2,", "0x69), 2, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49), 2,", "= SccPreambleAddressCode.find(b1, b2) if b2 > 0x5F and b1 %", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53), 1, 9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69), 2, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43), 2, 12, None, NamedColors.green.value, None,", "2, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41), 2, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B), 2, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57), 1, 3, 12, None, None, TextDecorationType(underline=True))", "0x45), 1, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65), 1,", "2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40), 1, 3, None,", "1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62), 1, 2, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67), 2, 13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47),", "0XFF)) byte_2_range = range(0x40, 0x80) other_bytes_1 = [item for item", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56), 1, 12, 12, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66), 1, 6, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46),", "5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72), 1, 6, 4,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A), 2, 9, None, NamedColors.yellow.value, None, 
None)", "7, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60), 1, 8, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60), 1, 2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "2, 8, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42), 2, 9,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51), 1, 3, 0, None, None, TextDecorationType(underline=True))", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56), 1, 14, 12, None,", "2, 15, 8, None, None, None) def test_scc_pac_indent_8_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55),", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63), 1, 4, None, NamedColors.green.value, None,", "2, 7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77), 2, 8,", "0x68), 1, 2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E), 2, 14, 28, None, None, None)", "1, 9, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72), 1, 10,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A), 2, 11, None, NamedColors.yellow.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40), 1, 12, None, NamedColors.white.value, None, None)", "0x7C), 1, 6, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C), 1,", "1, 10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C), 1, 
11,", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E), 1, 3, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77), 2, 13, 12, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65), 2, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "all_range if item not in channel_1_byte_1 and item not in", "6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47), 2, 7, None,", "1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C), 2, 2, 24,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A), 1, 11, 20, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x73), 2, 8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60), 2, 6, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A), 2, 8, None, NamedColors.yellow.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58), 1, 3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69), 1, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53), 2, 
5, 4, None,", "2, 14, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64), 2, 15,", "15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) def test_scc_pac_yellow(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A), 1,", "1, 14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74), 1, 15,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B), 2, 11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "0x44), 2, 3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59), 2, 14, 16, None, None,", "13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F), 2, 14, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50), 1, 9, 0, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E), 1, 12, 28, None, None, None)", "2, 5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A), 2, 6,", "9, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66), 2, 10, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55), 1, 9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "3, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C), 2, 4, None,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45), 1, 7, None, NamedColors.blue.value,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A), 1, 13, 20, None,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E), 1, 8, 28, None,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74), 2, 2, 8, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40), 2, 1, None, NamedColors.white.value, None,", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "0x6E), 1, 10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49), 2, 3, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69),", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E), 2, 1, None, NamedColors.white.value,", "def test_scc_pac_indent_4(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52), 1, 1, 4, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72), 2, 4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52),", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56), 1, 5, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72), 1, 15, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52),", "0x76), 2, 2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56), 2,", "1, 6, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D), 1, 7,", "5, None, 
NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67), 1, 6, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73), 2, 2, 4, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68), 1, 4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "1, 13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57), 1, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A), 1, 6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A),", "7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63), 1, 8, None,", "0x6A), 2, 4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A), 2,", "2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E), 2, 3, None,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79), 2, 4, 16, None,", "9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F), 1, 10, None,", "0x49), 1, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75), 2, 2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A), 2, 13, None, NamedColors.yellow.value, None, 
None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48), 1, 9, None,", "0x50), 1, 7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70), 1,", "9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78), 1, 10, 16,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x51), 2, 7, 0, None, None,", "0x5C), 1, 1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C), 1,", "0x4A), 1, 1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A), 1,", "0x6A), 2, 13, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69), 1, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F), 1, 10, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C), 2, 8, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C),", "2, 13, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B), 2, 14,", "2, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67), 2, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79), 1, 6, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59),", "NamedColors.magenta.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C), 2, 14, None, NamedColors.magenta.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67), 2, 2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65), 2, 4, None, NamedColors.blue.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69), 1, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78), 2, 15, 16, None, None, None)", "8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52), 1, 9, 4,", "0x70), 2, 6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50), 2,", "3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64), 1, 4, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D), 1, 3, 24, None, None, TextDecorationType(underline=True))", "9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60), 2, 10, None,", "1, 3, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73), 1, 4,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C), 1, 8, 24, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C), 2, 7, None, NamedColors.magenta.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68), 1, 15, None, NamedColors.red.value, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79), 1, 15, 16, None,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D), 1, 5, None, NamedColors.magenta.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71), 1, 6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "0x64), 2, 13, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52), 2, 9, 4, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C), 2, 3, 24, None, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68), 1, 4, None, NamedColors.red.value, None,", "0x7D), 2, 6, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59), 1, 1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46), 1, 14, None, NamedColors.cyan.value, None, None)", "14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A), 1, 15, None,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F), 1, 5, None, NamedColors.white.value, FontStyleType.italic,", "NamedColors.cyan.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46), 1, 5, None, NamedColors.cyan.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A), 2, 13, None, NamedColors.yellow.value, None, None)", "A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) def test_scc_pac_cyan(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x46), 1, 1,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D), 1, 4, None, NamedColors.magenta.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B), 2, 10, 20, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E), 2, 7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E),", "1, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67), 1, 13,", "2, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65), 2, 13,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62), 1, 6, None, NamedColors.green.value, None, None)", "None, TextDecorationType(underline=True)) def test_scc_pac_red(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48), 1, 1, None, NamedColors.red.value,", "1, 10, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51), 1, 11,", "2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B), 1, 3, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67), 1, 8, None, 
NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76), 2, 10, 12, None,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62), 2, 15, None, NamedColors.green.value, None,", "14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67), 2, 15, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C), 1, 9, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A), 1, 12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "0x70), 1, 2, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50), 1,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C), 2, 3, None, NamedColors.magenta.value, None,", "4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47), 2, 5, None,", "11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48), 2, 12, None,", "5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A), 1, 6, 20,", "= [item for item in all_range if item not in", "2, 13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F), 2, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B), 2, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 
0x59), 1, 12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79),", "0x5B), 1, 11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B), 1,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F), 2, 15, None, NamedColors.white.value,", "0x7C), 2, 15, 24, None, None, None) def test_scc_pac_indent_24_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "IN ANY WAY OUT OF THE USE OF THIS #", "0x72), 2, 4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52), 2,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41), 1, 9, None, NamedColors.white.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50), 2, 9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66), 1, 15, None, NamedColors.cyan.value, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72), 2, 4, 4, None, None,", "0x4A), 1, 7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A), 1,", "0x56), 2, 3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76), 2,", "2, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41), 2, 7,", "1, 3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72), 1, 4,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D), 2, 10, None, 
NamedColors.magenta.value, None,", "14, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78), 1, 15, 16,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B), 1, 15, None, NamedColors.yellow.value, None,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73), 1, 15, 4, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F), 2, 7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60), 1, 13, None, NamedColors.white.value, None, None)", "1, 10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42), 1, 11,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C), 1, 7, None, NamedColors.magenta.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51), 2, 9, 0, None, None, TextDecorationType(underline=True))", "15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45), 2, 1, None,", "4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41), 1, 5, None,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E), 2, 13, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E), 2, 9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E),", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B), 2, 13, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "1, 11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D), 1, 12,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A), 2, 13, None, NamedColors.yellow.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75), 2, 13, 8, None, None, TextDecorationType(underline=True))", "0x75), 2, 8, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55), 2,", "3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76), 1, 4, 12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F), 1, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41), 1, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F), 1, 3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E), 1, 4, 28, None, None, None)", "2, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63), 2, 10,", "0x49), 2, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49), 2,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44), 2, 7, None, NamedColors.blue.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60), 1, 8, None, NamedColors.white.value, None, None)", "test_scc_pac_indent_4_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53), 1, 1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x74), 1, 8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54), 1,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79), 2, 13, 16, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61), 1, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64), 1, 13, None, NamedColors.blue.value, None,", "2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48), 1, 3, None,", "0x76), 2, 8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56), 2,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A), 2, 2, None, NamedColors.yellow.value, None,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55), 2, 1, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70), 1, 10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50),", "0x7E), 2, 4, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E), 2,", 
"TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F), 1, 6, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B), 2, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D), 1, 12, 24, None, None, TextDecorationType(underline=True))", "1, 6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45), 1, 7,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D), 2, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75), 1, 2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59), 1, 11, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x59),", "5, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79), 2, 6, 16,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D), 1, 6, 24, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53), 2, 14, 4, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42), 2, 3, None, NamedColors.green.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62),", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E), 1, 12, None, NamedColors.white.value, FontStyleType.italic, None)", "0x4C), 2, 9, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52), 2, 7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50), 2, 9, 0, None, None,", "10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F), 2, 11, 28,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A), 2, 11, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53), 2, 5, 4, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57), 2, 12, 12, None, None, TextDecorationType(underline=True))", "<filename>src/test/python/test_scc_pacs.py #!/usr/bin/env python # -*- coding: UTF-8 -*- # Copyright", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67), 1, 8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45), 2, 1, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C), 2, 6, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x6A), 1, 6, None, NamedColors.yellow.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C), 2, 10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C),", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F), 2, 15, None, NamedColors.white.value, FontStyleType.italic,", "0x66), 1, 6, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46), 1,", "2, 11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55), 2, 12,", "or without # modification, are permitted provided that the following", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73), 1, 4, 4, None,", "12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64), 1, 13, None,", "SCCPreambleAddressCodesTest(unittest.TestCase): def test_scc_pac_values(self): channel_1_byte_1 = [0x11, 0x12, 0x15, 0x16, 0x17,", "0x43), 2, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63), 2,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44), 2, 11, None, NamedColors.blue.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50), 2, 14, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50), 2, 11, 0, None,", "2, 8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B), 2, 9,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65), 2, 2, None, NamedColors.blue.value,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F), 1, 15, 28, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E), 2, 4, 28, None, None, None)", "0x58), 2, 7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78), 2,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F), 2, 7, None, NamedColors.white.value, FontStyleType.italic,", "0x45), 2, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77), 2, 8, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D), 2, 4, 24, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71), 1, 2, 0, None, None, TextDecorationType(underline=True))", "1, 10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D), 1, 11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71), 1, 15, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51),", "1, 7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B), 1, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61), 1, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41),", "13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55), 2, 14, 8,", "0x75), 1, 10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55), 1,", "1, 7, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61), 1, 8,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63), 2, 13, None, NamedColors.green.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55), 1, 3, 8, None, None, TextDecorationType(underline=True))", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42), 2, 1, None, NamedColors.green.value,", "7, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C), 2, 8, None,", "0x43), 2, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D), 1, 14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55), 2, 9, 8, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58), 1, 1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74), 2, 13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x54),", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77), 1, 6, 12, None, None, TextDecorationType(underline=True))", "1, 1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B), 1, 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42), 1, 14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43), 2, 5, None, NamedColors.green.value,", "1, 11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44), 1, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46), 1, 9, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66),", "13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C), 2, 14, None,", "8, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57), 2, 9, 12,", "# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77), 2, 6, 12, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B), 2, 10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "1, 15, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50), 2, 1,", "14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F), 1, 15, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E), 
2, 9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F), 1, 3, 28, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D), 1, 5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D), 2, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E), 2, 12, None, NamedColors.white.value, FontStyleType.italic, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77), 2, 6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57),", "12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63), 2, 13, None,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61), 2, 4, None, NamedColors.white.value,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C), 2, 7, None, NamedColors.magenta.value, None,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72), 1, 6, 4, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x43), 2, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53), 1, 11, 4, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A), 2, 6, None, NamedColors.yellow.value, None, None)", "0x5A), 2, 14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F), 1, 12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F),", "0x4C), 2, 7, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44), 2, 9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "0x55), 2, 3, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75), 2,", "0x47), 2, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67), 2,", "0x53), 2, 14, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73), 2,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54), 1, 12, 8, None,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D), 1, 4, 24, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58), 2, 11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58),", "3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79), 1, 4, 16,", "0x5B), 2, 11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B), 2,", "None, NamedColors.yellow.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B), 2, 4, None, NamedColors.yellow.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D), 2, 5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "0x7C), 1, 15, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64), 1, 8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44),", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60), 1, 13, None, NamedColors.white.value, None,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B), 1, 2, 20, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76), 1, 13, 12, None, None,", "EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E), 2, 10, None, NamedColors.white.value, FontStyleType.italic,", "3, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60), 2, 4, None,", "12, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C), 2, 13, 24,", "0x59), 2, 11, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59), 2,", "2, 1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E), 2, 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C), 1, 10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56), 1, 3, 
12, None, None,", "1, 15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47), 2, 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E), 2, 5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62), 1, 13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78), 1, 15, 16, None, None, None)", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C), 1, 13, None, NamedColors.magenta.value, None,", "5, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77), 1, 6, 12,", "13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D), 1, 14, None,", "13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58), 1, 14, 16,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50), 1, 3, 0, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40), 1, 3, None, NamedColors.white.value, None,", "0x60), 2, 15, None, NamedColors.white.value, None, None) def test_scc_pac_white_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68), 2, 2, None, NamedColors.red.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D), 2, 5, 24, None, None, TextDecorationType(underline=True))", "1, 7, 4, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73), 1, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63), 2, 10, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50), 2, 9, 0, None, None, None)", "0x78), 2, 2, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58), 2,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B), 1, 5, 20, None,", "0x51), 2, 14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71), 2,", "2, 7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70), 2, 8,", "0x44), 1, 12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F), 1, 3, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C), 2, 10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C), 2, 13, 24, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47), 2, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "1, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45), 1, 3,", "1, 13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E), 
1, 14,", "12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62), 2, 13, None,", "0x53), 1, 14, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62), 1, 10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60), 1, 13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F), 2, 3, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F), 2, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) def test_scc_pac_indent_0(self):", "1, 6, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56), 1, 7,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40), 1, 7, None, NamedColors.white.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56), 2, 11, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B), 1, 13, None, NamedColors.yellow.value,", "None, None) def test_scc_pac_indent_28_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F), 1, 1, 28, None,", "1, 11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58), 1, 12,", "7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 
0x66), 1, 8, None,", "1, 4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56), 1, 5,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C), 1, 9, 24, None, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49), 1, 12, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6A), 1, 10, None, NamedColors.yellow.value, None,", "14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A), 1, 15, 20,", "0x7B), 2, 10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B), 2,", "2, 13, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59), 2, 14,", "10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53), 2, 11, 4,", "0x4F), 2, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F), 2,", "ttconv.style_properties import TextDecorationType, NamedColors, FontStyleType class SCCPreambleAddressCodesTest(unittest.TestCase): def test_scc_pac_values(self): channel_1_byte_1", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50), 1, 3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E), 1, 6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "0x58), 1, 3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x78), 1,", "0x48), 2, 1, None, NamedColors.red.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68), 2,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E), 1, 11, 28, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52), 1, 14, 4, None, None,", "0x63), 1, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57), 1, 5, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "0x4F), 1, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53), 1, 7, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73),", "1, 12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B), 1, 13,", "def test_scc_pac_indent_28(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E), 1, 1, 28, None, None, None)", "2, 10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A), 2, 11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66), 1, 4, None, NamedColors.cyan.value, None, None)", "1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78), 2, 2, 16,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E), 2, 13, 28, None, None,", "1, 13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54), 1, 14,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52), 2, 9, 4, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C), 2, 8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "0x71), 2, 10, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E), 2, 1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66), 2, 13, None, NamedColors.cyan.value, None, None)", "14, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E), 1, 15, 28,", "0x5F), 1, 1, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F), 1,", "0x56), 2, 5, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59), 1, 3, 16, None, None, TextDecorationType(underline=True))", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E), 2, 5, None, NamedColors.white.value,", "0x6B), 2, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) def test_scc_pac_magenta(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75), 1, 2, 8, None, None, TextDecorationType(underline=True))", "6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F), 2, 7, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70), 2, 4, 0, None, None, None)", 
"TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69), 2, 4, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "0x13, 0x14] channel_2_byte_1 = [0x19, 0x1A, 0x1D, 0x1E, 0x1F, 0x18,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74), 1, 13, 8, None, None,", "1, 1, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F), 1, 2,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67), 1, 10, None, NamedColors.cyan.value,", "12, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C), 1, 13, 24,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75), 1, 13, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68), 2, 15, None, NamedColors.red.value, None, None) def test_scc_pac_red_underline(self):", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65), 2, 10, None, NamedColors.blue.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A), 1, 11, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A),", "AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B), 1, 14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F), 1, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F),", "0x5B), 2, 5, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B), 2,", "0x74), 2, 8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54), 2,", "2, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) def test_scc_pac_green(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F), 2, 7, 28, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74), 1, 4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46), 2, 7, None, NamedColors.cyan.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C), 2, 10, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C),", "0x7A), 2, 10, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A), 2,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62), 1, 6, None, NamedColors.green.value, None,", "0x4A), 1, 11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A), 1,", "1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61), 1, 2, None,", "b2 in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in other_bytes_1: for", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78), 1, 10, 16, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40), 1, 14, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "15, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B), 2, 1, 20,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A), 2, 7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63), 2, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43),", "2, 10, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48), 2, 11,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A), 2, 9, None, NamedColors.yellow.value, None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E), 1, 7, None, NamedColors.white.value, FontStyleType.italic,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79), 2, 2, 16, None, None, TextDecorationType(underline=True))", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79), 2, 6, 16, None,", "0x71), 1, 13, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79), 2, 2, 16, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C), 1, 12, 24, None, None,", "FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO", "2, 7, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75), 2, 8,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F), 2, 15, 28, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A), 1, 11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C), 1, 4, 24, None, None,", "9, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C), 2, 10, None,", "0x5E), 2, 7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E), 2, 8, 28, None, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72), 2, 4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "13, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44), 2, 14, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59), 2, 7, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79),", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44), 2, 3, None, NamedColors.blue.value,", "1, 4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48), 1, 5,", "None, NamedColors.white.value, FontStyleType.italic, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F), 1, 13, None, NamedColors.white.value,", "copyright notice, # this list of conditions and the following", "2, 8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C), 2, 9,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57), 1, 11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57),", "1, 15, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48), 2, 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5C), 1, 14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54), 2, 11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "0x73), 2, 8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B), 1, 3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72), 2, 6, 4, None, None,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61), 2, 6, None, NamedColors.white.value,", "0x45), 1, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65), 1,", "4, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55), 1, 5, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F), 1, 15, 28, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5F),", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D), 2, 14, None, NamedColors.magenta.value, None,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50), 2, 1, 0, None,", "2, 10, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45), 2, 11,", "NamedColors.red.value, None, None) def test_scc_pac_red_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49), 1, 1, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F), 2, 10, 28, None, None, TextDecorationType(underline=True))", "0x44), 1, 9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F), 1, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F),", "0x5E), 1, 1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E), 1,", "0x4A), 1, 9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6A), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77), 1, 4, 12, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49), 2, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "0x5A), 2, 1, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7A), 2,", "0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51), 1, 14, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C), 1, 15, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C),", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70), 1, 4, 0, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D), 2, 11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "9, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72), 1, 10, 4,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62), 2, 8, None, NamedColors.green.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E), 1, 8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A), 1, 8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77), 1, 6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D), 1, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D),", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53), 2, 12, 4, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B), 1, 12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79), 1, 4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59),", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F), 2, 13, None, NamedColors.white.value, FontStyleType.italic,", "3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C), 1, 4, 24,", "0x53), 2, 3, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52), 1, 9, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72),", "0x7F), 2, 2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F), 2,", "0x46), 2, 12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40), 2, 9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60),", "2, 13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40), 2, 14,", "1, 3, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C), 1, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75), 1, 13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64), 1, 4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67), 2, 2, None, NamedColors.cyan.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47),", "2, 6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50), 2, 7,", "8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E), 1, 9, 28,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78), 1, 10, 16, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76), 1, 8, 12, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45), 1, 1, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65),", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F), 1, 10, None, NamedColors.white.value, FontStyleType.italic,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78), 2, 10, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58),", "0x55), 2, 14, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42), 1, 12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76), 2, 13, 12, None, None,", "7, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61), 1, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62), 1, 6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42),", "5, 24, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D), 2, 6, 24,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41), 1, 7, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "1, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D), 1, 4,", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65), 1, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62), 2, 4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42),", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E), 1, 13, None, NamedColors.white.value, FontStyleType.italic,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61), 1, 8, None, NamedColors.white.value, None,", "None, TextDecorationType(underline=True)) def test_scc_pac_indent_20(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A), 1, 1, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B), 2, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B),", "other_bytes_2 = [item for item in all_range if item not", "1, 15, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E), 2, 1,", "2, 8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A), 2, 9,", "24, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D), 2, 5, 24, None,", "2, 5, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D), 2, 6,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E), 2, 14, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D), 2, 7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43), 2, 5, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49), 1, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60), 2, 8, None, NamedColors.white.value,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40), 1, 9, None, NamedColors.white.value, None,", "1, 9, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66), 1, 10,", "8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D), 2, 9, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E), 1, 15, 28, None, None,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65), 1, 8, None, NamedColors.blue.value,", "0x72), 1, 6, 4, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B), 1, 10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E), 2, 3, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E),", "indent, color, font_style, text_decoration): self.assertEqual(channel, pac.get_channel()) self.assertEqual(row, pac.get_row()) self.assertEqual(indent, pac.get_indent())", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63), 1, 8, None, NamedColors.green.value,", "0x6F), 1, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70), 2, 4, 0, None, None,", "7, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C), 1, 8, 24,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72), 1, 13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65), 2, 4, None,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B), 2, 9, 20, None,", "1, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77), 1, 2, 12,", "11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40), 2, 12, None,", "0x4C), 1, 9, None, NamedColors.magenta.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C), 1,", "4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D), 2, 5, None,", "9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B), 1, 10, 20,", "14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B), 2, 15, 20,", "9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E), 2, 10, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64), 1, 10, None, NamedColors.blue.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44), 1, 7, None, NamedColors.blue.value, None, None)", "0x65), 2, 6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x45), 2,", "2, 9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62), 2, 10,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B), 2, 4, 20, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F), 1, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F),", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41), 1, 3, None, NamedColors.white.value,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B), 1, 2, None, NamedColors.yellow.value,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71), 2, 6, 0, None, None, TextDecorationType(underline=True))", "0x40), 2, 12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60), 2,", "test_scc_pac_indent_8_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55), 1, 1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76), 1, 6, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79), 2, 10, 16, None, None,", "None, NamedColors.blue.value, None, None) def test_scc_pac_blue_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45), 1, 1,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63), 1, 6, None, NamedColors.green.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77), 1, 15, 12, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55), 1, 11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55),", "0x7E), 2, 13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66), 2, 4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40), 1, 12, None, NamedColors.white.value, None,", "0x50), 1, 14, 0, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70), 1,", "4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46), 1, 5, None,", "0x49), 1, 1, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69), 1,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41), 2, 14, None, NamedColors.white.value,", "2. Redistributions in binary form must reproduce the above copyright", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78), 2, 10, 16, None, None, None)", "8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E), 1, 9, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58), 1, 12, 16, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D), 1, 11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "0x5D), 2, 12, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D), 2,", "0x40), 1, 11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40), 1,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42), 2, 12, None, NamedColors.green.value,", "0x52), 2, 7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72), 2,", "8, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_12(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56), 1, 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D), 2, 13, 24, None, None,", 
"NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A), 2, 14, None, NamedColors.yellow.value, None,", "# # 1. Redistributions of source code must retain the", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62), 2, 4, None, NamedColors.green.value,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55), 1, 14, 8, None, None,", "IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE", "2, 12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60), 2, 13,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B), 2, 15, 20, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48), 1, 11, None, NamedColors.red.value, None, None)", "4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A), 1, 5, 20,", "0x40), 2, 7, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60), 2,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69), 2, 8, None, NamedColors.red.value,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61), 2, 8, None, NamedColors.white.value,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54), 1, 7, 8, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53), 1, 9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76), 1, 13, 12, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56),", "2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F), 2, 3, 28,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B), 2, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "15, 8, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_12(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56), 1,", "3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D), 1, 4, None,", "SUCH DAMAGE. \"\"\"Unit tests for the SCC PACs\"\"\" # pylint:", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F), 2, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46), 2, 9, None, NamedColors.cyan.value, None,", "2, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A), 2, 3, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D), 1, 6, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48), 2, 1, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68),", "3, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F), 1, 4, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48), 1, 7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64), 1, 4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44),", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55), 2, 9, 8, None,", "0x7B), 1, 13, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B), 1,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65), 1, 2, None, NamedColors.blue.value, None,", "source and binary forms, with or without # modification, are", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48), 2, 1, None, NamedColors.red.value, None, None)", "PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE", "0x7D), 2, 4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E), 2, 8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E),", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73), 1, 8, 4, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47), 2, 11, None, NamedColors.cyan.value, None,", "10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A), 2, 11, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D), 1, 10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D),", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44), 1, 14, None, NamedColors.blue.value,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42), 2, 5, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47), 2, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x45), 2, 7, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73), 1, 2, 4, None, None,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44), 2, 7, None, NamedColors.blue.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x64), 2, 2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44),", "1, 3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B), 1, 4,", "0x69), 1, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49), 1,", "0x76), 1, 6, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56), 1,", "0x6F), 1, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40), 2, 5, None, NamedColors.white.value, None, None)", "2, 8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F), 2, 9,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58), 1, 14, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73), 2, 10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73), 2, 4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68), 2, 4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6A), 1, 10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "0x57), 2, 7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77), 2,", "0x6E), 1, 8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49), 2, 7, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57), 2, 5, 12, None, None, TextDecorationType(underline=True))", "0x7C), 2, 4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5A), 2, 14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "1, 7, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75), 1, 8,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E), 1, 14, None, NamedColors.white.value, FontStyleType.italic, None)", "None, None) def 
test_scc_pac_indent_12_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57), 1, 1, 12, None,", "1, 15, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52), 2, 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67), 1, 10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47),", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B), 2, 10, 20, None,", "0x71), 2, 2, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D), 1, 3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D),", "0x4B), 2, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C), 2, 14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C),", "0x77), 1, 4, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46), 2, 9, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41), 2, 14, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "0x62), 2, 6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42), 2,", "0x44), 1, 3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64), 1,", 
"None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65), 1, 10, None, NamedColors.blue.value,", "1, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69), 1, 2, None,", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C), 2, 12, 24, None,", "2, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) def test_scc_pac_cyan(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x46),", "in all_range if item not in channel_1_byte_1 and item not", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49), 1, 3, None, NamedColors.red.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58), 1, 3, 16, None, None, None)", "11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C), 2, 12, None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E), 1, 5, None, NamedColors.white.value, FontStyleType.italic,", "13, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A), 1, 14, None,", "0x74), 2, 4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54), 2,", "2, 9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74), 2, 10,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x43), 2, 14, None, NamedColors.green.value, None,", "11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F), 2, 12, 
28,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D), 1, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "7, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75), 2, 8, 8,", "1, 15, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A), 2, 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61), 1, 8, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74), 1, 6, 8, None,", "0x7B), 1, 6, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B), 1,", "0x6D), 2, 10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44), 2, 12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64),", "1, 2, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51), 1, 3,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55), 2, 12, 8, None,", "4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52), 2, 5, 4,", "0x52), 2, 1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C), 1, 9, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C),", "11, 4, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52), 1, 12, 4,", "1, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49), 2, 1,", "1, 3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70), 1, 4,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B), 1, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "0x46), 2, 9, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66), 2,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40), 2, 9, None, NamedColors.white.value, None,", "1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73), 2, 2, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56), 2, 11, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56),", "0x65), 1, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45), 1,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B), 2, 7, None, NamedColors.yellow.value, None,", "15, None, NamedColors.red.value, None, None) def test_scc_pac_red_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49), 1,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D), 2, 3, None, NamedColors.magenta.value,", "this # list of conditions and the following disclaimer. 
#", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63), 2, 6, None, NamedColors.green.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49), 2, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60), 1, 8, None, NamedColors.white.value,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C), 1, 8, None, NamedColors.magenta.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72), 2, 10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68), 2, 13, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68), 1, 10, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D), 1, 11, 24, None, None,", "b2) if b2 > 0x5F and b1 % 0x08 ==", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58), 1, 9, 16, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F), 1, 13, 28, None, None, TextDecorationType(underline=True))", "2, 5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78), 2, 6,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A), 2, 4, None, NamedColors.yellow.value, None,", "6, 12, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56), 2, 7, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74), 1, 4, 8, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x64), 2, 2, None, NamedColors.blue.value, None, None)", "== 0: # row 11 case self.assertIsNone(pac) else: self.assertIsNotNone(pac) for", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D), 1, 3, 24, None, None,", "Consulting LLC # # Redistribution and use in source and", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47), 2, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50), 2, 3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F), 1, 2, 28, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C), 1, 1, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C), 1, 3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61), 2, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A), 1, 13, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44), 2, 1, None, NamedColors.blue.value, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x64),", "1, 1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73), 1, 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C), 2, 12, 24, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E), 2, 2, 28, None, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E), 2, 12, 28, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78), 2, 8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71), 1, 2, 0, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50), 1, 14, 0, None, None, None)", "8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56), 1, 9, 12,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63), 2, 15, None, NamedColors.green.value, None,", "0x43), 1, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63), 1, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74), 1, 4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46), 1, 12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "in byte_2_range: pac = 
SccPreambleAddressCode.find(b1, b2) if b2 > 0x5F", "of conditions and the following disclaimer in the documentation #", "test_scc_pac_magenta(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C), 1, 1, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F), 1, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45), 1, 7, None, NamedColors.blue.value, None,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) def test_scc_pac_magenta(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C), 1, 1, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48), 2, 7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "1, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7A), 2, 2, 20,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E), 2, 15, 28, None, None, None) def", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x45), 2, 7, None, NamedColors.blue.value,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66), 1, 2, None, NamedColors.cyan.value,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F), 1, 3, None, NamedColors.white.value,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F), 2, 3, None, 
NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66), 1, 10, None, NamedColors.cyan.value, None,", "0x40), 1, 1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60), 1,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E), 2, 2, None, NamedColors.white.value, FontStyleType.italic,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75), 2, 13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55),", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76), 1, 4, 12, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50), 1, 11, 0, None, None,", "0x15, 0x16, 0x17, 0x10, 0x13, 0x14] channel_2_byte_1 = [0x19, 0x1A,", "0x63), 2, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E), 2, 13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E),", "test_scc_pac_green_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43), 1, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70), 1, 6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50),", "4, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E), 1, 5, None,", "0x57), 1, 1, 12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77), 1,", "2, 2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55), 2, 3,", "3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F), 1, 4, None,", "1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63), 2, 2, None,", "0x55), 2, 7, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75), 2,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61), 1, 4, None, NamedColors.white.value,", "12, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74), 2, 13, 8,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58), 2, 9, 16, None,", "10, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C), 2, 11, None,", "1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E), 1, 2, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69), 2, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "self.assertEqual(font_style, pac.get_font_style()) self.assertEqual(text_decoration, pac.get_text_decoration()) def test_scc_pac_white(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40), 1, 1,", "12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F), 2, 13, None,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 
0x72), 1, 2, 4, None,", "0x63), 2, 10, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E), 1, 7, 28, None, None, None)", "8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54), 1, 9, 8,", "OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D), 2, 1, 24, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55), 1, 11, 8, None, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48), 1, 11, None, NamedColors.red.value, None,", "6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55), 1, 7, 8,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x45), 2, 7, None, NamedColors.blue.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43), 2, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "1, 3, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75), 1, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61), 1, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "0x47), 1, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67), 1,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67), 1, 6, None, NamedColors.cyan.value, None,", "1, 10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B), 1, 11,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55), 2, 5, 8, None, None, TextDecorationType(underline=True))", "1, 4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47), 1, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D), 2, 8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6D), 2, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "2, 9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E), 2, 10,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69), 1, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "1, 10, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44), 1, 11,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45), 2, 12, None, NamedColors.blue.value,", "0x5B), 2, 1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C), 1, 13, 24, None, None, None)", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43), 1, 12, None, NamedColors.green.value,", "12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D), 1, 13, None,", "1, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4B), 1, 7,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D), 1, 14, None, NamedColors.magenta.value,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E), 1, 15, None, NamedColors.white.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A), 2, 6, 20, None, None, None)", "2, 12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E), 2, 13,", "OF SUCH DAMAGE. 
\"\"\"Unit tests for the SCC PACs\"\"\" #", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D), 2, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "0x6E), 1, 13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E), 1,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D), 1, 5, 24, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44), 1, 11, None, NamedColors.blue.value,", "0x69), 2, 4, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49), 2,", "8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54), 2, 9, 8,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62), 2, 6, None, NamedColors.green.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78), 2, 2, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B), 1, 2, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A), 1, 3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A), 2, 12, 20, None, None, None)", "2, 3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74), 2, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C), 2, 11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D), 1, 10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D),", "5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61), 2, 6, None,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E), 1, 3, 28, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79), 2, 8, 16, None, None,", "1, 8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44), 1, 9,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61), 2, 4, None, NamedColors.white.value, None,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71), 2, 13, 0, None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E), 1, 14, None, NamedColors.white.value, FontStyleType.italic,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61), 1, 15, None, NamedColors.white.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B), 1, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "1, 14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66), 1, 15,", "0x51), 1, 3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53), 2, 14, 4, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57), 2, 12, 12, None, None,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77), 1, 8, 12, None,", "0x52), 2, 12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72), 2,", "conditions and the following disclaimer in the documentation # and/or", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56), 1, 5, 12, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C), 1, 3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C),", "all_range if item not in list(byte_2_range)] for b1 in channel_1_byte_1:", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77), 2, 13, 12, None, None, TextDecorationType(underline=True))", "0x56), 1, 14, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A), 2, 6, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47), 1, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "forms, with or without # modification, are permitted provided that", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69), 1, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49),", "13, 20, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B), 2, 14, 20,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57), 2, 12, 12, None,", "0x7D), 1, 10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48), 2, 14, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76), 2, 15, 12, None, None, None) def test_scc_pac_indent_12_underline(self):", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D), 2, 14, None, NamedColors.magenta.value,", "2, 15, 28, None, None, None) def test_scc_pac_indent_28_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F),", "2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49), 2, 3, None,", "3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64), 2, 4, None,", "4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B), 1, 5, None,", "7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65), 1, 8, None,", "0x4E), 1, 11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E), 1,", "3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B), 1, 4, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70), 2, 2, 0, None, None, 
None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64), 2, 15, None, NamedColors.blue.value, None, None) def test_scc_pac_blue_underline(self):", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47), 2, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "1, 2, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53), 1, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46), 2, 14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66),", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E), 2, 11, 28, None,", "2, 8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E), 2, 9,", "14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E), 2, 15, None,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75), 1, 2, 8, None,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42), 1, 14, None, NamedColors.green.value,", "STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E), 2, 14, 28, None,", "6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48), 1, 7, None,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E), 2, 4, None, NamedColors.white.value,", "met: # # 1. 
Redistributions of source code must retain", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48), 1, 14, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46), 2, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46), 2, 5, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60), 1, 8, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70), 2, 4, 0, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63), 1, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "10, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46), 1, 11, None,", "8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50), 1, 9, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76), 2, 4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56),", "0x58), 1, 9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58), 2, 9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72), 2, 8, 4, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66), 2, 10, None, NamedColors.cyan.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "row 11 case self.assertIsNone(pac) else: self.assertIsNotNone(pac) for b2 in other_bytes_2:", "0x4F), 2, 9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F), 2,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40), 1, 5, None, NamedColors.white.value,", "0x58), 1, 14, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51), 1, 5, 0, None, None,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F), 1, 3, 28, None,", "1, 13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F), 1, 14,", "1, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65), 1, 2, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75), 2, 15, 8, None, None, TextDecorationType(underline=True)) def", "2, 4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58), 2, 5,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69), 1, 2, None, NamedColors.red.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63), 2, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F), 1, 13, None, NamedColors.white.value, 
FontStyleType.italic, TextDecorationType(underline=True))", "byte_2_range: pac = SccPreambleAddressCode.find(b1, b2) if b2 > 0x5F and", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78), 2, 13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "0x72), 2, 6, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D), 2, 5, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "2, 15, 20, None, None, None) def test_scc_pac_indent_20_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53), 1, 11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53),", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) def test_scc_pac_blue(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44), 1, 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A), 2, 1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66), 1, 10, None, NamedColors.cyan.value, None, None)", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67), 2, 8, None, NamedColors.cyan.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A), 2, 9, 20, None, None, None)", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E), 2, 10, 28, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66), 1, 2, None, NamedColors.cyan.value, None, None)", "6, None, 
NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D), 1, 7, None,", "0x78), 1, 13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58), 1,", "9, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C), 1, 10, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74), 2, 2, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73), 2, 4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41), 1, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "(c) 2020, Sandflow Consulting LLC # # Redistribution and use", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A), 1, 3, 20, None,", "conditions and the following disclaimer. # 2. 
Redistributions in binary", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63), 1, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66), 2, 8, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67), 2, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "the above copyright notice, this # list of conditions and", "0x6F), 1, 13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D), 2, 7, 24, None, None,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75), 1, 8, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A), 2, 5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47), 2, 14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "OUT OF THE USE OF THIS # SOFTWARE, EVEN IF", "2, 13, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A), 2, 14,", "0x79), 1, 6, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59), 1,", "None, None) def test_scc_pac_white_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41), 1, 1, None, NamedColors.white.value,", "NamedColors.green.value, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42), 1, 3, None, NamedColors.green.value, None,", "0x45), 2, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x65), 2,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F), 1, 7, None, NamedColors.white.value, FontStyleType.italic,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C), 2, 11, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "1, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45), 2, 1,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B), 2, 5, None, NamedColors.yellow.value, None,", "2, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67), 2, 6,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70), 1, 4, 0, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59), 2, 1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65), 2, 6, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44), 2, 11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60), 1, 10, None, NamedColors.white.value, None,", 
"None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A), 1, 7, None, NamedColors.yellow.value,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D), 1, 14, None, NamedColors.magenta.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79), 2, 4, 16, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A), 2, 6, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A),", "None, TextDecorationType(underline=True)) def test_scc_pac_yellow(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A), 1, 1, None, NamedColors.yellow.value,", "2, 3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A), 2, 4,", "0x4D), 1, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40), 2, 12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60),", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E), 2, 8, None, NamedColors.white.value, FontStyleType.italic, None)", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D), 2, 6, 24, None, None,", "1, 10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55), 1, 11,", "modification, are permitted provided that the following conditions are met:", "12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A), 1, 13, 20,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 
0x50), 1, 5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72), 2, 10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51), 2, 1, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x47), 1, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52), 2, 11, 4, None, None, None)", "2, 13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55), 2, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42), 2, 12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62),", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A), 1, 7, 20, None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E), 1, 2, None, NamedColors.white.value, FontStyleType.italic,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49), 2, 12, None, NamedColors.red.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E), 1, 10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46), 2, 7, None, NamedColors.cyan.value,", "2, 6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58), 2, 7,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63), 1, 10, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "0x4F), 2, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E), 1, 6, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47), 2, 7, None, NamedColors.cyan.value,", "None, NamedColors.white.value, None, None) def test_scc_pac_white_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41), 1, 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75), 1, 4, 8, None, None, TextDecorationType(underline=True))", "2, 15, 12, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_16(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58),", "0x69), 1, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49), 2,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78), 2, 13, 16, None,", "0x6D), 2, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D), 1, 5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D),", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43), 2, 7, None, NamedColors.green.value, 
None,", "0x4F), 2, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F), 2,", "1, 7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D), 1, 8,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68), 2, 8, None, NamedColors.red.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47), 1, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67),", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E), 2, 3, 28, None,", "list(byte_2_range)] for b1 in channel_1_byte_1: for b2 in byte_2_range: pac", "test_scc_pac_blue_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45), 1, 1, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E), 2, 15, None, NamedColors.white.value, FontStyleType.italic, None) def", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43), 2, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63),", "9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69), 1, 10, None,", "1, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43), 1, 14,", "1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x72), 1, 2, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71), 1, 13, 0, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C), 2, 3, None, NamedColors.magenta.value, None, None)", "None, TextDecorationType(underline=True)) def test_scc_pac_indent_4(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52), 1, 1, 4, None,", "0x5F), 1, 3, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F), 1,", "0x55), 2, 1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75), 2,", "2, 9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64), 2, 10,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x47), 1, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53), 1, 12, 4, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B), 1, 8, 20, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A), 2, 14, None, NamedColors.yellow.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68), 1, 2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "0x58), 1, 11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58), 1,", "1, 4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C), 1, 5,", "2, 13, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56), 2, 14,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D), 1, 4, 24, None, None, TextDecorationType(underline=True))", "0x7A), 2, 13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5A), 2,", "0x67), 1, 13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40), 1, 14, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60),", "THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR # ANY", "1, 13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46), 1, 14,", "2, 4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B), 2, 5,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40), 2, 11, None, NamedColors.white.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51), 1, 5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55), 1, 5, 8, None, None, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79), 2, 4, 16, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76), 2, 8, 12, None, None,", "2, 1, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77), 2, 2,", "def test_scc_pac_indent_20(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A), 1, 1, 20, None, None, None)", "2, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61), 2, 10,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77), 2, 4, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63), 1, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43),", "8, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51), 2, 9, 0,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A), 2, 2, None, NamedColors.yellow.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55), 2, 7, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A), 1, 6, 20, None, None,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69), 1, 2, None, NamedColors.red.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F), 1, 7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "4, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B), 1, 5, 20,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50), 1, 7, 0, None, None, None)", "1, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x64), 2, 2, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59), 1, 5, 
16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79),", "12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B), 2, 13, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55), 2, 9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75),", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F), 2, 3, None, NamedColors.white.value,", "6, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40), 2, 7, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B), 2, 9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "0x40), 2, 1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E), 2, 15, 28, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40), 1, 3, None, NamedColors.white.value, None, None)", "0x79), 2, 2, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59), 2,", "15, 20, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_24(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B), 2, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51), 2, 12, 0, None, None, TextDecorationType(underline=True))", "0x4B), 2, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B), 2,", "0x61), 1, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41), 1,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F), 2, 9, None, NamedColors.white.value, FontStyleType.italic,", "0x57), 1, 7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77), 1,", "PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63), 1, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61), 1, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41),", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E), 1, 11, None, NamedColors.white.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F), 2, 4, 28, None, None, TextDecorationType(underline=True))", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78), 2, 15, 16, None,", "2, 11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51), 2, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51), 1, 1, 0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71),", "2, 13, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51), 2, 14,", "AND CONTRIBUTORS \"AS IS\" AND # ANY EXPRESS OR IMPLIED", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68), 2, 2, None, NamedColors.red.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56), 1, 7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59), 2, 1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72), 1, 4, 4, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x73), 2, 8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78), 2, 2, 16, None, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76), 2, 13, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F), 2, 6, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "= [0x11, 0x12, 0x15, 0x16, 0x17, 0x10, 0x13, 0x14] channel_2_byte_1", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56), 2, 12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, TextDecorationType(underline=True)) def test_scc_pac_indent_24(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C), 1, 1, 24, None,", "28, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F), 1, 4, 28, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E), 2, 1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "pac, channel, row, indent, color, font_style, text_decoration): self.assertEqual(channel, pac.get_channel()) self.assertEqual(row,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52), 2, 12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "0x7F), 2, 8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78), 1, 15, 16, None, None,", "THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF", "0, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_4(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52), 1, 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48), 2, 9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "1, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D), 1, 15,", "11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42), 1, 12, None,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x78), 1, 4, 16, None,", "2, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41), 2, 5,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69), 2, 15, None, NamedColors.red.value,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57), 1, 7, 12, None, None, TextDecorationType(underline=True))", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46), 2, 3, None, NamedColors.cyan.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67), 1, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76), 2, 10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65), 1, 6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "1, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69), 2, 2, None,", "0x70), 1, 10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A), 2, 13, 20, None, None,", "1, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67), 1, 6,", "10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55), 1, 11, 8,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63), 2, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "1, 14, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68), 1, 15,", "20, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B), 1, 6, 20, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D), 1, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) def test_scc_pac_white_italics(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E), 1, 1, None,", "11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46), 1, 12, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E), 1, 7, 28, None, None,", "0x4B), 1, 3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B), 1,", "1, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71), 2, 2, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73), 1, 10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49), 1, 7, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69),", "1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73), 1, 2, 4,", "4, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E), 1, 5, 28,", "1, 9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75), 1, 10,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65), 2, 15, None, NamedColors.blue.value, None,", "0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51), 2, 11, 0, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64), 1, 10, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "0x41), 1, 14, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C), 2, 4, 24, None, None,", "4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C), 2, 5, 24,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51), 1, 7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68), 2, 8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "1, 6, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44), 1, 7,", "4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A), 2, 5, 20,", "0x41), 1, 7, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61), 1,", "# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D), 1, 9, 24, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41), 1, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78), 2, 13, 16, None, None,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F), 2, 5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F),", "0x59), 1, 5, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79), 1,", "1, 3, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62), 1, 4,", "15, 4, None, None, None) def test_scc_pac_indent_4_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53), 1,", "TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "0x62), 2, 10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42), 2,", "0x58), 2, 5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78), 2,", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71), 2, 10, 0, None, None, TextDecorationType(underline=True))", "disable=R0201,C0115,C0116 import unittest from ttconv.scc.codes.preambles_address_codes import SccPreambleAddressCode from ttconv.style_properties import", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E), 1, 2, 28, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53), 1, 11, 4, None, None, TextDecorationType(underline=True))", "0x69), 1, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B), 1, 5, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "2, 2, 16, 
None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58), 2, 3,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B), 2, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "POSSIBILITY OF SUCH DAMAGE. \"\"\"Unit tests for the SCC PACs\"\"\"", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67), 2, 13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50), 2, 11, 0, None, None,", "self.assertEqual(row, pac.get_row()) self.assertEqual(indent, pac.get_indent()) self.assertEqual(color, pac.get_color()) self.assertEqual(font_style, pac.get_font_style()) self.assertEqual(text_decoration, pac.get_text_decoration())", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55), 1, 1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75),", "0x54), 1, 7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74), 1,", "0x5C), 1, 14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C), 1,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F), 2, 7, 28, None,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D), 2, 10, None, NamedColors.magenta.value,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D), 1, 12, 24, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x78), 1, 4, 16, None, None,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73), 2, 6, 4, None,", 
"None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41), 1, 5, None, NamedColors.white.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45), 1, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46), 1, 14, None, NamedColors.cyan.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B), 2, 7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64), 1, 6, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44),", "2, 1, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F), 2, 2,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60), 1, 8, None, NamedColors.white.value, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41), 2, 7, None, NamedColors.white.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67), 1, 4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "12, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D), 2, 13, 24,", "1, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B), 1, 2,", "0x45), 1, 9, None, 
NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65), 1,", "0x58), 2, 9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71), 2, 2, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C), 1, 3, 24, None,", "2, 6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57), 2, 7,", "# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE", "8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43), 1, 9, None,", "12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79), 1, 13, 16,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A), 1, 10, 20, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42), 2, 5, None, NamedColors.green.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E), 2, 2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E),", "5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67), 2, 6, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F), 2, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 
0x7A), 2, 2, 20, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62), 1, 2, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42),", "0x4B), 1, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B), 1,", "8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A), 2, 9, 20,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C), 1, 12, None, NamedColors.magenta.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42), 2, 9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44), 1, 11, None, NamedColors.blue.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53), 2, 7, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x73),", "1, 8, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55), 1, 9,", "2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45), 2, 3, None,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73), 2, 2, 4, None,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53), 1, 5, 4, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43), 2, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "2, 2, None, NamedColors.green.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42), 2, 3,", "15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) def test_scc_pac_green(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42), 1,", "None) def test_scc_pac_indent_0_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51), 1, 1, 0, None, None,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F), 1, 15, None, NamedColors.white.value,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55), 1, 9, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D), 2, 11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D),", "14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E), 1, 15, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B), 2, 15, 20, None, None, TextDecorationType(underline=True))", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50), 2, 9, 0, None,", "for item in all_range if item not in list(byte_2_range)] for", "0x4D), 2, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6D), 2,", "7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76), 1, 8, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43), 2, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51), 1, 
14, 0, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C), 2, 1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56), 1, 12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76),", "0x7E), 1, 15, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59), 2, 5, 16, None, None, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57), 2, 5, 12, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B), 1, 4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B),", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42), 2, 9, None, NamedColors.green.value,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51), 2, 12, 0, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41), 1, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A), 2, 6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58), 2, 9, 16, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55), 2, 3, 8, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D), 2, 8, 24, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E), 1, 11, 28, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C), 1, 3, 24, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C), 2, 9, 24, None, None, None)", "1, 12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72), 1, 13,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D), 1, 14, 24, None,", "import unittest from ttconv.scc.codes.preambles_address_codes import SccPreambleAddressCode from ttconv.style_properties import TextDecorationType,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D), 2, 1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44), 2, 1, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D), 1, 4, 24, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57), 2, 7, 12, None, None, TextDecorationType(underline=True))", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42), 1, 9, None, NamedColors.green.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72), 2, 2, 4, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 
0x4B), 1, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "2, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D), 2, 3, 24,", "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F), 1, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79), 1, 6, 16, None,", "11, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41), 2, 12, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A), 1, 7, None, NamedColors.yellow.value, None, None)", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56), 1, 7, 12, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B), 2, 14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79), 1, 2, 16, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73), 2, 6, 4, None, None,", "2, 11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57), 2, 12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47), 2, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "16, None, None, TextDecorationType(underline=True)) def 
test_scc_pac_indent_20(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A), 1, 1,", "1, 6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42), 1, 7,", "2, 8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58), 2, 9,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62), 2, 8, None, NamedColors.green.value,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B), 2, 15, None, NamedColors.yellow.value,", "0x67), 2, 2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47), 2,", "0x4E), 1, 9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E), 1,", "14, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75), 1, 15, 8,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76), 2, 15, 12, None, None, None) def", "2, 13, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C), 2, 14,", "2, 3, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75), 2, 4,", "0x5D), 1, 11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D), 1,", "0x7E), 1, 8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E), 1,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E), 1, 14, None, NamedColors.white.value,", "10, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43), 1, 
11, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66), 1, 4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E), 2, 10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45), 1, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F), 1, 7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70), 1, 10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F), 2, 4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68), 1, 8, None, NamedColors.red.value, None, None)", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B), 2, 11, None, NamedColors.yellow.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57), 1, 5, 12, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) def test_scc_pac_indent_28(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E), 1, 1, 28, None,", "2, 7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E), 2, 8,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49), 1, 12, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69),", "None, None) def test_scc_pac_green_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43), 1, 1, None, NamedColors.green.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6E), 1, 4, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "test_scc_pac_red(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48), 1, 1, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x41), 2, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61), 2,", "0x65), 1, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45), 2,", "0x61), 2, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) def test_scc_pac_green(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "list(range(0x00, 0XFF)) byte_2_range = range(0x40, 0x80) other_bytes_1 = [item for", "None, NamedColors.green.value, None, None) def test_scc_pac_green_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43), 1, 1,", "10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A), 1, 11, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50), 1, 7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70),", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F), 2, 9, None, NamedColors.white.value, FontStyleType.italic, 
TextDecorationType(underline=True))", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68), 1, 15, None, NamedColors.red.value,", "8, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57), 1, 9, 12,", "9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D), 2, 10, 24,", "1, 13, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50), 1, 14,", "10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D), 1, 11, None,", "# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "2, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69), 2, 15,", "10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42), 1, 11, None,", "0x65), 2, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52), 1, 11, 4, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B), 2, 13, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D), 2, 1, 24, None, None, TextDecorationType(underline=True))", "15, None, NamedColors.blue.value, None, None) def test_scc_pac_blue_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45), 1,", "2, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 
0x43), 2, 14,", "7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76), 2, 8, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C), 1, 15, None, NamedColors.magenta.value, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D), 1, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76), 2, 8, 12, None,", "6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A), 1, 7, 20,", "10, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57), 2, 11, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45), 2, 9, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65),", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42), 2, 3, None, NamedColors.green.value, None,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60), 1, 13, None, NamedColors.white.value,", "1, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49), 1, 11,", "0x6A), 1, 4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A), 1,", "0x70), 2, 13, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7E), 1, 13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, NamedColors.green.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62), 2, 2, None, NamedColors.green.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53), 2, 3, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73),", "0x7C), 2, 8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C), 2, 5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A), 1, 3, 20, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44), 2, 14, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64),", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63), 2, 2, None, NamedColors.green.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55), 2, 14, 8, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57), 2, 3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43), 2, 12, None, NamedColors.green.value,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69), 1, 15, None, NamedColors.red.value,", "12, None, None, None) def test_scc_pac_indent_12_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57), 1, 1,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62), 2, 13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B), 2, 8, None,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D), 2, 6, 24, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57), 2, 1, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77), 2, 8, 12, None,", "4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48), 1, 5, None,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62), 2, 4, None, NamedColors.green.value, None,", "if b2 > 0x5F and b1 % 0x08 == 0:", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67), 1, 8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F), 2, 11, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A), 1, 3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "in binary form must reproduce the above copyright notice, #", "channel_2_byte_1: for b2 in byte_2_range: pac = SccPreambleAddressCode.find(b1, b2) if", "binary forms, with or without # modification, are permitted provided", "2, 10, None, 
NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44), 2, 11,", "5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68), 1, 6, None,", "provided that the following conditions are met: # # 1.", "None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_4(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52), 1, 1, 4,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64), 2, 15, None, NamedColors.blue.value,", "1, 10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E), 1, 11,", "2, 6, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F), 2, 7,", "6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43), 2, 7, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D), 2, 2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77), 1, 8, 12, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F), 1, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F),", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45), 1, 14, None, NamedColors.blue.value, None,", "0x5E), 1, 9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B), 2, 5, None, 
NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B),", "0x53), 1, 9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73), 1,", "2, 7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D), 2, 8,", "2, 15, 4, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_8(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54),", "0x54), 2, 12, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62), 2, 6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x59), 2, 7, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79), 2,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69), 2, 6, None, NamedColors.red.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E), 2, 9, 28, None, None, None)", "0x7F), 2, 13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F), 2,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A), 1, 3, None, NamedColors.yellow.value,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A), 2, 15, None, NamedColors.yellow.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46), 1, 11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46),", "NamedColors.blue.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44), 1, 14, None, NamedColors.blue.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77), 2, 2, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57),", "12, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71), 2, 13, 0,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E), 2, 15, 28, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50), 1, 12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46), 2, 3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A), 2, 7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "1, 9, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65), 1, 10,", "0x43), 2, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72), 2, 8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64), 2, 6, None, NamedColors.blue.value,", "0x79), 1, 15, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59), 2,", "12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66), 2, 13, None,", 
"TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57), 2, 5, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D), 1, 6, None, NamedColors.magenta.value,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54), 2, 7, 8, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40), 2, 9, None, NamedColors.white.value, None, None)", "0x41), 1, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61), 1,", "0x68), 2, 6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48), 2,", "0x73), 1, 13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53), 1,", "2, 15, None, NamedColors.red.value, None, None) def test_scc_pac_red_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B), 2, 12, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D), 2, 9, None, NamedColors.magenta.value,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44), 1, 3, None, NamedColors.blue.value, None,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E), 2, 12, None, NamedColors.white.value,", "2, 9, None, NamedColors.cyan.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66), 2, 10,", "13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41), 2, 14, None,", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND # ANY EXPRESS", "0x7D), 1, 13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70), 1, 13, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E), 2, 6, 28, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A), 2, 15, 20, None, None,", "None, TextDecorationType(underline=True)) def test_scc_pac_indent_8(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54), 1, 1, 8, None,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43), 1, 3, None, NamedColors.green.value, None,", "following disclaimer in the documentation # and/or other materials provided", "2, 15, 24, None, None, None) def test_scc_pac_indent_24_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D),", "pac.get_font_style()) self.assertEqual(text_decoration, pac.get_text_decoration()) def test_scc_pac_white(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40), 1, 1, None,", "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND #", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76), 1, 13, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C), 2, 14, 24, None,", "3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 
0x78), 1, 4, 16,", "range(0x00, 0xFF): self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) def check_scc_pac_attributes(self, pac, channel, row, indent,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A), 2, 7, 20, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47), 1, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75), 1, 4, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B), 2, 2, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "0x7E), 1, 13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E), 1,", "1, 12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A), 1, 13,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E), 1, 4, 28, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73), 1, 13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70), 2, 2, 0, None, None, None)", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62), 1, 15, None, NamedColors.green.value,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60), 1, 6, None, NamedColors.white.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44), 2, 14, None, NamedColors.blue.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED", "1, 10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56), 1, 11,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73), 2, 4, 4, None, None, TextDecorationType(underline=True))", "2, 3, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62), 2, 4,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A), 1, 14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x5C), 2, 9, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C), 2,", "2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52), 2, 3, 4,", "2, 2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E), 2, 3,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41), 2, 12, None, NamedColors.white.value,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75), 2, 13, 8, None,", "0x51), 1, 5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D), 2, 2, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D),", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51), 1, 7, 0, None,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40), 2, 12, None, NamedColors.white.value,", "OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF 
USE, DATA,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63), 2, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59), 2, 12, 16, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A), 1, 2, 20, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D), 1, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "2, 10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40), 2, 11,", "1, 8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C), 1, 9,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56), 1, 14, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F), 1, 8, 28, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55), 2, 11, 8, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B), 1, 10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59), 2, 1, 16, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D), 1, 2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D),", "0x4C), 2, 12, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48), 1, 5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "test_scc_pac_white_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41), 1, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x49), 2, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69), 2,", "0x5C), 2, 1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C), 2,", "the following conditions are met: # # 1. Redistributions of", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x70), 2, 6, 0, None,", "2, 5, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76), 2, 6,", "6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48), 2, 7, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53), 2, 7, 4, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D), 2, 6, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "1, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45), 1, 14,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F), 2, 14, 28, None, None, TextDecorationType(underline=True))", "0x7B), 2, 6, 20, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B), 2,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58), 1, 11, 16, None,", "1, 10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50), 1, 11,", "11, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45), 1, 12, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41), 1, 3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "14, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70), 2, 15, 0,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79), 1, 2, 16, None, None, TextDecorationType(underline=True))", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D), 1, 12, None, NamedColors.magenta.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E), 2, 7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55), 2, 7, 8, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F), 2, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F),", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49), 1, 9, None, NamedColors.red.value,", "0x56), 1, 5, 12, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76), 1,", "0x4C), 1, 7, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55), 1, 14, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77), 2, 15, 12, None, None, TextDecorationType(underline=True)) def", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B), 2, 6, 20, None, None, TextDecorationType(underline=True))", "item in all_range if item not in list(byte_2_range)] for b1", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D), 2, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6A), 1, 4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A),", "9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73), 2, 10, 4,", "1, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x47), 1, 7,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68), 2, 15, None, NamedColors.red.value, None,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69), 2, 2, None, NamedColors.red.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62), 2, 4, None, NamedColors.green.value, None, None)", "0x68), 
1, 8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48), 1,", "0x5B), 1, 7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B), 1,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B), 2, 10, None, NamedColors.yellow.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46), 2, 1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66),", "0x69), 2, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49), 2,", "2, 14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62), 2, 15,", "1, 10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F), 1, 11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66), 2, 8, None, NamedColors.cyan.value, None, None)", "FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL", "ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR", "1, 6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A), 1, 7,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72), 2, 4, 4, None, None, None)", "1, 11, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45), 1, 12,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55), 2, 5, 8, None, None,", "11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50), 1, 12, 0,", "16, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79), 2, 15, 16, None,", "2, 9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F), 2, 10,", "2, 7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E), 2, 8,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61), 1, 4, None, NamedColors.white.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69), 1, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60), 1, 2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47), 2, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A), 2, 12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C), 2, 1, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52), 1, 3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B), 2, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41), 2, 14, None, NamedColors.white.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61),", "1, 13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58), 1, 14,", "7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62), 2, 8, None,", "15, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A), 2, 1, None,", "13, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50), 2, 14, 0,", "6, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B), 1, 7, 20,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C), 1, 12, 24, None, None, None)", "0x44), 2, 12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64), 2,", "None) def test_scc_pac_white_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41), 1, 1, None, NamedColors.white.value, None,", "1. 
Redistributions of source code must retain the above copyright", "0x1C] all_range = list(range(0x00, 0XFF)) byte_2_range = range(0x40, 0x80) other_bytes_1", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A), 1, 2, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71), 1, 15, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "in channel_1_byte_1 and item not in channel_2_byte_1] other_bytes_2 = [item", "2, 3, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F), 2, 4,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F), 1, 7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E), 1, 3, 28, None, None,", "0x4C), 2, 14, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60), 1, 15, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43), 1, 11, None, NamedColors.green.value,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63), 2, 13, None, NamedColors.green.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D), 2, 12, 24, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "1, 11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50), 1, 12,", "1, 14, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70), 1, 15,", "2, 9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B), 2, 10,", "2, 5, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F), 2, 6,", "0x52), 1, 9, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72), 1,", "0x68), 2, 8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48), 2,", "0x4A), 2, 11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42), 1, 3, None, NamedColors.green.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79), 1, 8, 16, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70), 2, 13, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71), 1, 13, 0, None, None, TextDecorationType(underline=True))", "4, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E), 2, 5, None,", "10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42), 2, 11, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69), 1, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58), 1, 9, 16, None, None, None)", "3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A), 2, 4, 20,", "0x4B), 2, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74), 1, 2, 8, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x54), 2, 14, 8, None, None, None)", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F), 1, 11, None, NamedColors.white.value,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70), 2, 2, 0, None,", "0x6E), 2, 15, None, NamedColors.white.value, FontStyleType.italic, None) def test_scc_pac_white_italics_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x4A), 2, 14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57), 1, 11, 12, None, None, TextDecorationType(underline=True))", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B), 1, 6, None, NamedColors.yellow.value, None,", "10, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59), 1, 11, 16,", "3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67), 1, 4, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C), 1, 9, 
24, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F), 1, 14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C), 2, 15, 24, None, None, None) def", "7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A), 2, 8, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46), 1, 11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41), 2, 11, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41),", "12, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78), 2, 13, 16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E), 1, 2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66), 2, 8, None,", "3, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F), 2, 4, 28,", "# and/or other materials provided with the distribution. 
# #", "1, 4, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51), 1, 5,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70), 2, 4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70), 2, 10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50),", "5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61), 1, 6, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69), 1, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59), 1, 11, 16, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73), 2, 10, 4, None, None, TextDecorationType(underline=True))", "0x4E), 2, 7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E), 2,", "2, 9, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77), 2, 10,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44), 2, 12, None, NamedColors.blue.value,", "2, 13, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50), 2, 14,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75), 1, 13, 8, None, None, TextDecorationType(underline=True))", "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D), 2, 7, None, NamedColors.magenta.value, None,", "1, 10, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59), 1, 11,", "1, 10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52), 1, 11,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66), 1, 13, None, NamedColors.cyan.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43), 2, 3, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "0x6F), 1, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F), 1,", "2, 13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C), 2, 14,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75), 2, 8, 8, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D), 2, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45), 1, 11, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69), 1, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "2, 4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48), 2, 5,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C), 2, 
13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "0x73), 1, 4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A), 1, 13, 20, None, None,", "0x73), 2, 15, 4, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_8(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x5D), 1, 12, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D), 1,", "0x53), 2, 9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73), 2,", "3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C), 2, 4, 24,", "None, None) def test_scc_pac_yellow_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B), 1, 1, None, NamedColors.yellow.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68), 1, 13, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48),", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72), 2, 2, 4, None,", "1, 2, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50), 1, 3,", "0x78), 1, 8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F), 1, 4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "2, 7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74), 2, 8,", "11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53), 1, 
12, 4,", "8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50), 2, 9, 0,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67), 1, 6, None, NamedColors.cyan.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51), 1, 12, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "2, 6, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59), 2, 7,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68), 1, 8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48),", "10, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45), 1, 11, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D), 2, 2, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "0x57), 1, 5, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77), 1,", "9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60), 1, 10, None,", "0x60), 1, 8, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40), 1,", "0x67), 1, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x47), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51), 2, 3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71),", "3, None, NamedColors.red.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69), 2, 4, None,", "and binary forms, with or without # modification, are permitted", "7, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75), 1, 8, 8,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71), 2, 10, 0, None,", "2, 6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A), 2, 7,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6E), 1, 4, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62), 1, 4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "in other_bytes_1: for b2 in range(0x00, 0xFF): self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) def", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D), 1, 15, None, NamedColors.magenta.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F), 2, 9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61), 1, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67), 2, 15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) def", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70), 2, 2, 0, None, None,", "1, None, 
NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C), 1, 2, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46), 2, 12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E), 1, 15, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74), 1, 15, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E), 1, 7, None, NamedColors.white.value, FontStyleType.italic, None)", "7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A), 1, 8, None,", "0x55), 2, 12, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75), 2,", "0x48), 2, 11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49), 1, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B), 1, 12, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "# row 11 case self.assertIsNone(pac) else: self.assertIsNotNone(pac) for b2 in", "1, 1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66), 1, 2,", "1, 10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B), 1, 11,", "4, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72), 2, 4, 4, None,", "None) def test_scc_pac_yellow_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B), 1, 1, None, NamedColors.yellow.value, None,", "0x63), 1, 10, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C), 2, 5, 24, None, None,", "0x43), 2, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64), 2, 4, None, NamedColors.blue.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41), 2, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61),", "0x68), 2, 10, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62), 1, 4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42),", "def test_scc_pac_indent_0_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51), 1, 1, 0, None, None, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79), 1, 10, 16, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72), 2, 8, 4, None, None, None)", "1, 6, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52), 1, 7,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D), 2, 3, None, 
NamedColors.magenta.value, None,", "0x42), 1, 11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56), 1, 11, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56),", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67), 1, 4, None, NamedColors.cyan.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64), 1, 4, None, NamedColors.blue.value, None, None)", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A), 1, 3, None, NamedColors.yellow.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59), 1, 5, 16, None, None, TextDecorationType(underline=True))", "0x54), 1, 14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74), 1,", "NamedColors.green.value, None, TextDecorationType(underline=True)) def test_scc_pac_blue(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44), 1, 1, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44), 1, 5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64),", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65), 1, 15, None, NamedColors.blue.value,", "1, 10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A), 1, 11,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E), 1, 9, 28, None, None,", "1, 9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D), 1, 10,", "0x66), 2, 15, None, 
NamedColors.cyan.value, None, None) def test_scc_pac_cyan_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44), 2, 12, None, NamedColors.blue.value, None,", "0x56), 2, 14, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F), 2, 10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E), 2, 10, 28, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55), 1, 3, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B), 1, 13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x6D), 1, 6, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60), 2, 2, None, NamedColors.white.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B), 2, 10, 20, None, None, TextDecorationType(underline=True))", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43), 2, 11, None, NamedColors.green.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x72), 1, 2, 4, None, None, None)", "Redistributions of source code must retain the above 
copyright notice,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43), 2, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42), 2, 7, None, NamedColors.green.value,", "0x1F, 0x18, 0x1B, 0x1C] all_range = list(range(0x00, 0XFF)) byte_2_range =", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY", "2, 8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48), 2, 9,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A), 2, 5, 20, None,", "9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79), 1, 10, 16,", "9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78), 2, 10, 16,", "1, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B), 2, 1,", "0x67), 1, 8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70), 2, 2, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53), 1, 9, 4, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D), 2, 1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64), 2, 4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64), 2, 6, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44), 1, 7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64), 2, 8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44),", "8, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46), 2, 9, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74), 2, 4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B), 2, 12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55), 2, 14, 8, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E), 1, 11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61), 2, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "Copyright (c) 2020, Sandflow Consulting LLC # # Redistribution and", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66), 1, 2, None, NamedColors.cyan.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A), 1, 6, None, NamedColors.yellow.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C), 2, 
10, 24, None, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53), 1, 11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56), 1, 3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76),", "12, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71), 1, 13, 0,", "2, 3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E), 2, 4,", "UTF-8 -*- # Copyright (c) 2020, Sandflow Consulting LLC #", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5F), 2, 1, 28, None, None, TextDecorationType(underline=True))", "11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43), 2, 12, None,", "2, 3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F), 2, 4,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74), 1, 10, 8, None, None,", "1, 6, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D), 1, 7,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F), 2, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56), 2, 5, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76),", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43), 2, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "1, 14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A), 1, 15,", "11, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52), 2, 12, 4,", "b1 % 0x08 == 0: # row 11 case self.assertIsNone(pac)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5A), 2, 14, 20, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47), 1, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49), 2, 11, None, NamedColors.red.value, None,", "2, 5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B), 2, 6,", "13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45), 1, 14, None,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B), 1, 9, None, NamedColors.yellow.value,", "0x54), 2, 1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74), 2,", "2, 3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64), 2, 4,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A), 2, 11, None, NamedColors.yellow.value, None,", "4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A), 2, 5, None,", "None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44), 2, 9, None, NamedColors.blue.value, None, None)", "2, 3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76), 2, 4,", "13, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56), 2, 14, 12,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A), 1, 12, None, NamedColors.yellow.value,", "8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A), 1, 9, 20,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47), 2, 14, None, NamedColors.cyan.value,", "0x12, 0x15, 0x16, 0x17, 0x10, 0x13, 0x14] channel_2_byte_1 = [0x19,", "8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48), 2, 9, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48), 2, 11, None, NamedColors.red.value, None,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B), 2, 12, None, NamedColors.yellow.value,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58), 1, 14, 16, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73), 1, 15, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53),", "12, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78), 1, 13, 16,", "2, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B), 2, 9,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C), 2, 4, 24, None,", "None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48), 1, 5, None, NamedColors.red.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49), 2, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49), 2, 3, None, NamedColors.red.value,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53), 1, 14, 4, None,", "2, 2, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59), 2, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67), 2, 4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49), 2, 7, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69),", "1, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65), 1, 4,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60), 2, 2, None, NamedColors.white.value, None,", "1, 15, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A), 2, 1,", "# 1. 
Redistributions of source code must retain the above", "0x4C), 1, 1, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C), 2, 11, None, NamedColors.magenta.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79), 1, 6, 16, None, None, TextDecorationType(underline=True))", "1, 6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50), 1, 7,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78), 1, 2, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43), 1, 5, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63),", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E), 2, 6, None, NamedColors.white.value, FontStyleType.italic,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54), 1, 3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74),", "1, 9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70), 1, 10,", "4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48), 2, 5, None,", "2, 15, None, NamedColors.white.value, FontStyleType.italic, None) def test_scc_pac_white_italics_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B), 1, 4, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B),", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B), 2, 12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C), 1, 15, None, NamedColors.magenta.value, None,", "12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E), 2, 13, None,", "1, 8, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42), 1, 9,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42), 2, 11, None, NamedColors.green.value, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56), 1, 11, 12, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52), 1, 1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x72),", "0x70), 2, 10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x44), 2, 5, None, NamedColors.blue.value, None, None)", "0x4A), 1, 5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56), 2, 5, 12, None, None,", "0x59), 2, 14, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79), 2,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56), 2, 7, 12, None,", "0x7A), 1, 15, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A), 2,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F), 1, 11, None, NamedColors.white.value, FontStyleType.italic, 
TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76), 2, 10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61), 1, 8, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62), 2, 15, None, NamedColors.green.value, None, None)", "1, 7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78), 1, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41), 2, 11, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B), 2, 6, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E), 1, 11, 28, None, None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E), 2, 9, None, NamedColors.white.value, FontStyleType.italic,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45), 1, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "PACs\"\"\" # pylint: disable=R0201,C0115,C0116 import unittest from ttconv.scc.codes.preambles_address_codes import SccPreambleAddressCode", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79), 2, 6, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "8, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74), 1, 4, 8, None,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A), 1, 15, None, NamedColors.yellow.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67), 1, 15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55), 2, 3, 8, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76), 1, 10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "1, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F), 2, 1,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40), 1, 7, None, NamedColors.white.value, None,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48), 2, 11, None, NamedColors.red.value,", "5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F), 1, 6, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C), 1, 11, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C),", "1, 2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48), 1, 3,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57), 1, 5, 12, None, None,", "coding: UTF-8 -*- # Copyright (c) 2020, Sandflow Consulting LLC", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47), 1, 14, None, 
NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67),", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B), 2, 2, 20, None,", "0x44), 1, 14, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64), 1,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D), 1, 7, 24, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68), 2, 10, None, NamedColors.red.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57), 2, 14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77),", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50), 1, 7, 0, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F), 2, 2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "0x49), 2, 7, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69), 2,", "1, 4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A), 1, 5,", "must retain the above copyright notice, this # list of", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E), 2, 6, None, NamedColors.white.value, FontStyleType.italic, None)", "1, 5, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76), 1, 6,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49), 1, 7, None, NamedColors.red.value, None,", "NamedColors.blue.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64), 1, 2, None, NamedColors.blue.value, None,", "2, 3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77), 2, 4,", "2, 7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62), 2, 8,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51), 1, 7, 0, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A), 2, 4, None, NamedColors.yellow.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A), 2, 5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "0x76), 1, 2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56), 1,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A), 1, 14, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E), 1, 14, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B), 1, 6, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B),", "1, 5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78), 1, 6,", "8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59), 2, 9, 16,", "1, 9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79), 1, 10,", "15, 16, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_20(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A), 1,", "None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F), 2, 14, 28, None, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44), 2, 1, None, NamedColors.blue.value,", "9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63), 1, 10, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D), 2, 7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74), 2, 13, 8, None, None,", "1, 12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F), 1, 13,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E), 1, 9, 28, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52), 1, 7, 4, None, None, None)", "4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56), 2, 5, 12,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B), 2, 11, None, NamedColors.yellow.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46), 2, 3, None, NamedColors.cyan.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76), 1, 10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56),", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D), 2, 13, None, NamedColors.magenta.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C), 1, 14, None, NamedColors.magenta.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79), 1, 13, 16, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68), 2, 2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48),", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E), 2, 4, None, NamedColors.white.value, FontStyleType.italic,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43), 1, 14, None, NamedColors.green.value,", "12, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C), 1, 13, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78), 2, 4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58),", "1, 11, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52), 1, 12,", "0x4D), 1, 11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D), 1,", "11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49), 1, 12, None,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A), 1, 12, None, NamedColors.yellow.value, None,", "1, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F), 1, 13,", "1, 14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C), 1, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67), 1, 15, None, NamedColors.cyan.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47),", "range(0x40, 0x80) other_bytes_1 = [item for item in all_range if", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44), 2, 7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64),", "0x7E), 2, 10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E), 2,", "1, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41), 1, 7,", "0x49), 2, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69), 2,", "14, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70), 1, 15, 0,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52), 1, 14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "4, None, None, None) def test_scc_pac_indent_4_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53), 1, 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F), 1, 14, 28, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72), 2, 13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52),", "0x7A), 1, 10, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A), 1,", "1, 12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E), 1, 13,", "2, 3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79), 2, 4,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66), 
2, 10, None, NamedColors.cyan.value, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C), 2, 6, 24, None, None,", "12, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77), 1, 13, 12,", "self.assertIsNone(pac) else: self.assertIsNotNone(pac) for b2 in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60), 2, 8, None, NamedColors.white.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48), 1, 14, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A), 1, 11, 20, None, None, None)", "2, 15, 20, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_24(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C),", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60), 2, 10, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E), 1, 13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E),", "9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75), 2, 10, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F), 2, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F),", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62), 2, 8, None, NamedColors.green.value, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) def test_scc_pac_green(self): 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42), 1, 1, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60), 1, 6, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44), 2, 3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50), 2, 11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72), 2, 10, 4, None, None, None)", "0x4E), 1, 7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E), 1,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B), 1, 11, 20, None,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69), 2, 10, None, NamedColors.red.value,", "1, 4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F), 1, 5,", "1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B), 2, 2, None,", "2, 12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79), 2, 13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47), 2, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67),", "USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50), 2, 1, 0, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70),", "2, 11, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45), 2, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56), 1, 7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76),", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44), 1, 9, None, NamedColors.blue.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F), 2, 14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51), 2, 5, 0, None, None, TextDecorationType(underline=True))", "0x70), 2, 2, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79), 1, 2, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59),", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47), 1, 3, None, NamedColors.cyan.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69), 2, 4, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58), 2, 5, 16, None, None, None)", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E), 2, 9, None, NamedColors.white.value, FontStyleType.italic, None)", "10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53), 1, 11, 4,", 
"TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F), 2, 7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54), 1, 12, 8, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58), 2, 7, 16, None, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A), 1, 5, 20, None, None,", "THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A), 1, 9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6A),", "test_scc_pac_values(self): channel_1_byte_1 = [0x11, 0x12, 0x15, 0x16, 0x17, 0x10, 0x13,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46), 2, 7, None, NamedColors.cyan.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59), 1, 11, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "1, 8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52), 1, 9,", "0x78), 1, 15, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D), 2, 2, 24, None, None,", "2, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67), 2, 10,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C), 2, 12, None, NamedColors.magenta.value,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D), 1, 7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D),", "0x78), 1, 4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43), 2, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72), 2, 6, 4, None, None, None)", "6, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40), 1, 7, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68), 1, 13, None, NamedColors.red.value, None,", "1, 15, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D), 2, 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B), 2, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B),", "0x45), 2, 11, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45), 2,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C), 2, 9, 24, None,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45), 1, 3, None, NamedColors.blue.value, None,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E), 2, 15, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61), 2, 8, None, NamedColors.white.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41),", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F), 2, 5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A), 2, 10, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A),", "11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D), 1, 12, None,", "2, 1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60), 2, 2,", "2, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45), 2, 9,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D), 1, 8, 24, None, None,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51), 1, 11, 0, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47), 1, 11, None, NamedColors.cyan.value, None,", "11, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41), 1, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C), 2, 2, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40), 2, 14, None, NamedColors.white.value, None, None)", "1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D), 1, 2, 24,", "2, 7, None, NamedColors.red.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69), 2, 8,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59), 2, 11, 16, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C), 2, 10, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59), 2, 11, 16, None,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55), 1, 3, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46), 1, 12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66),", "1, 14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62), 1, 15,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47), 2, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x45), 2, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49), 1, 7, None,", "0x6A), 1, 10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50), 1, 3, 0, None, None, None)", "2, 12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72), 2, 
13,", "0x68), 1, 13, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48), 1,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B), 1, 10, None, NamedColors.yellow.value,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66), 1, 10, None, NamedColors.cyan.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A), 1, 9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "1, 7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77), 1, 8,", "2, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43), 2, 12,", "the following disclaimer. # 2. Redistributions in binary form must", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73), 2, 13, 4, None,", "1, 3, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E), 1, 4,", "0x47), 2, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67), 2,", "def test_scc_pac_indent_16(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58), 1, 1, 16, None, None, None)", "0x59), 1, 12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C), 1, 4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C), 2, 2, None, NamedColors.magenta.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45), 1, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65),", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C), 2, 15, None, NamedColors.magenta.value, None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E), 1, 15, None, NamedColors.white.value, FontStyleType.italic,", "0x73), 1, 10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53), 1,", "0x6F), 2, 13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F), 2,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F), 2, 2, None, NamedColors.white.value,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61), 2, 10, None, NamedColors.white.value, None,", "0x53), 2, 11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A), 2, 4, 20, None, None,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72), 2, 15, 4, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53), 1, 1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73),", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48), 1, 9, None, NamedColors.red.value,", 
"None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47), 1, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54), 2, 12, 8,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x47), 1, 7, None, NamedColors.cyan.value,", "def test_scc_pac_blue(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44), 1, 1, None, NamedColors.blue.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65), 1, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E), 1, 2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61), 2, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D), 2, 11, None, NamedColors.magenta.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77), 2, 15, 12, None, None, TextDecorationType(underline=True))", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64), 1, 4, None, NamedColors.blue.value, None,", "1, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43), 1, 5,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 
0x6A), 1, 10, None, NamedColors.yellow.value,", "2, 15, 24, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_28(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E),", "None, TextDecorationType(underline=True)) def test_scc_pac_indent_12(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56), 1, 1, 12, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B), 1, 2, 20, None, None,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F), 2, 11, None, NamedColors.white.value,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48), 2, 12, None, NamedColors.red.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72), 1, 15, 4, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74), 1, 10, 8, None, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40), 1, 5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A), 1, 6, None, NamedColors.yellow.value, None,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C), 2, 4, None, NamedColors.magenta.value,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56), 1, 14, 12, None, None,", "0x80) other_bytes_1 = [item for item in all_range if item", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69), 2, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, 
NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C), 1, 2, None, NamedColors.magenta.value,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F), 2, 4, None, NamedColors.white.value, FontStyleType.italic,", "0x5F), 2, 5, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B), 2, 8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B),", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69), 1, 8, None, NamedColors.red.value,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57), 2, 14, 12, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D), 1, 15, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D), 2, 7, 24, None,", "NamedColors.yellow.value, None, None) def test_scc_pac_yellow_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B), 1, 1, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F), 1, 8, 28, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D), 1, 15, 24, None, None, TextDecorationType(underline=True))", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C), 2, 12, None, 
NamedColors.magenta.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D), 2, 12, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D),", "8, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42), 1, 9, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D), 2, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D), 1, 8, None, NamedColors.magenta.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79), 1, 8, 16, None, None,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D), 2, 11, 24, None,", "0x7C), 2, 13, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49), 1, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN", "OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT #", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67), 1, 4, None, NamedColors.cyan.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54), 2, 5, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73), 1, 4, 4, None, None,", "1, 6, None, NamedColors.white.value, FontStyleType.italic, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E), 1, 7,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52), 1, 12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "0x54), 1, 9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75), 1, 15, 8, None, None, TextDecorationType(underline=True))", "# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B), 1, 5, 20, None, None, TextDecorationType(underline=True))", "7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74), 2, 8, 8,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E), 2, 2, 28, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47), 2, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71), 1, 10, 0, None, None, TextDecorationType(underline=True))", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41), 2, 11, None, NamedColors.white.value, None,", "24, None, None, None) def test_scc_pac_indent_24_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D), 1, 1,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E), 2, 7, None, NamedColors.white.value,", "the SCC PACs\"\"\" # pylint: disable=R0201,C0115,C0116 import unittest from ttconv.scc.codes.preambles_address_codes", "None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E), 2, 11, 28, None, None, None)", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62), 1, 10, None, NamedColors.green.value, None,", "and b1 % 0x08 == 0: # row 11 case", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49), 2, 3, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61), 1, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, None, None) def test_scc_pac_indent_0_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51), 1, 1, 0,", "12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F), 1, 13, 28,", "1, 9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6A), 1, 10,", "0x57), 2, 3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57), 2, 14, 12, None, None, TextDecorationType(underline=True))", "0x58), 1, 1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78), 1,", "1, 15, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46), 2, 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73), 1, 15, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A), 1, 2, None, NamedColors.yellow.value, None, 
None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54), 1, 7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74),", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A), 2, 4, None, NamedColors.yellow.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F), 1, 15, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66), 2, 15, None, NamedColors.cyan.value, None, None) def", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62), 1, 15, None, NamedColors.green.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C), 1, 11, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76), 1, 4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46), 1, 7, None, NamedColors.cyan.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F), 1, 9, 28, None, None, TextDecorationType(underline=True))", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68), 1, 2, None, NamedColors.red.value,", "TextDecorationType(underline=True)) def test_scc_pac_indent_4(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52), 1, 1, 4, None, None,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A), 2, 3, None, NamedColors.yellow.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75), 1, 8, 8, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62), 2, 2, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53), 2, 9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76), 1, 2, 12, None,", "def test_scc_pac_indent_28_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F), 1, 1, 28, None, None, TextDecorationType(underline=True))", "None, None, None) def test_scc_pac_indent_8_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55), 1, 1, 8,", "0x5E), 2, 14, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55), 2, 11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55),", "item in all_range if item not in channel_1_byte_1 and item", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64), 1, 8, None, NamedColors.blue.value, None,", "12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60), 1, 13, None,", "0x63), 2, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43), 2,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42), 1, 7, None, NamedColors.green.value, None,", "1, 15, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59), 2, 
1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E), 2, 11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D), 2, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "0x5B), 2, 3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B), 2,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66), 2, 13, None, NamedColors.cyan.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F), 1, 13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F),", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63), 2, 10, None, NamedColors.green.value,", "2, 4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C), 2, 5,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43), 1, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F), 2, 8, None, NamedColors.white.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F), 2, 3, 28, None, None, TextDecorationType(underline=True))", "2, 15, 12, None, None, None) def test_scc_pac_indent_12_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76), 1, 2, 12, None, None, None)", "0x7D), 2, 2, 24, 
None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D), 2,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56), 1, 12, 12, None,", "None, None) def test_scc_pac_red_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49), 1, 1, None, NamedColors.red.value,", "2, 15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) def test_scc_pac_red(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48),", "15, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55), 2, 1, 8,", "0x6D), 1, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D), 1,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76), 2, 6, 12, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72), 1, 8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "materials provided with the distribution. 
# # THIS SOFTWARE IS", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C), 1, 7, 24, None, None, None)", "0x41), 2, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44), 1, 1, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78), 2, 13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58),", "2, 9, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A), 2, 10,", "11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E), 1, 12, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D), 2, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B), 2, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53), 1, 14, 4, None, None,", "for the SCC PACs\"\"\" # pylint: disable=R0201,C0115,C0116 import unittest from", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54), 2, 3, 8, None, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72), 1, 6, 4, None, None,", "1, 11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57), 1, 12,", "8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D), 1, 
9, None,", "13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49), 1, 14, None,", "0x4D), 1, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D), 1,", "1, 11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55), 1, 12,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E), 1, 8, None, NamedColors.white.value,", "15, None, NamedColors.yellow.value, None, None) def test_scc_pac_yellow_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E), 1, 10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E),", "2, 15, 16, None, None, None) def test_scc_pac_indent_16_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D), 2, 5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D),", "0x66), 2, 2, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46), 2,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E), 2, 13, 28, None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E), 1, 11, None, NamedColors.white.value, FontStyleType.italic,", "1, 15, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40), 2, 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D), 2, 1, 24, None, None,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D), 2, 10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D),", "4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F), 1, 5, 28,", "0x47), 1, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x65), 2, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47), 1, 3, None, NamedColors.cyan.value,", "10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C), 2, 11, 24,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65), 2, 15, None, NamedColors.blue.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58), 1, 7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54), 2, 11, 8, None, None, None)", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40), 2, 7, None, NamedColors.white.value,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4C), 1, 3, None, NamedColors.magenta.value, None,", "2, 9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75), 2, 10,", "2, 0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51), 2, 3, 0,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B), 1, 11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52), 2, 3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72),", "8, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42), 2, 9, None,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68), 2, 6, None, NamedColors.red.value,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E), 2, 14, 28, None, None,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66), 1, 15, None, NamedColors.cyan.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A), 2, 2, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "NamedColors.white.value, None, None) def test_scc_pac_white_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41), 1, 1, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F), 2, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A), 2, 3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59), 2, 9, 16, None, None, TextDecorationType(underline=True))", "NamedColors.green.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43), 2, 9, None, NamedColors.green.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C), 2, 6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED.", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A), 1, 3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74), 1, 13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E), 1, 11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D), 2, 15, 24, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_28(self):", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49), 2, 7, None, NamedColors.red.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67), 2, 4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66), 2, 15, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64), 2, 10, None, NamedColors.blue.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69), 2, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "FontStyleType.italic, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F), 1, 9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56), 2, 9, 12, None, None,", "0x44), 1, 7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64), 1,", "0x45), 1, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65), 1,", "4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40), 1, 5, None,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75), 2, 8, 8, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56), 1, 5, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76), 1, 15, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56),", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C), 1, 4, None, NamedColors.magenta.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48), 2, 7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52), 2, 11, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x50), 1, 5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "15, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59), 2, 1, 16,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61), 2, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "2, 4, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51), 2, 5,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50), 2, 5, 0, None,", "0x4B), 1, 5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59), 2, 5, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77), 1, 2, 12, None,", "8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F), 1, 9, None,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A), 1, 12, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72), 1, 4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52),", "2, 7, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C), 2, 8,", "3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68), 2, 4, None,", "0x6E), 2, 4, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E), 2,", "9, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72), 2, 10, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B), 1, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B),", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B), 2, 6, None, NamedColors.yellow.value,", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C), 1, 6, None, NamedColors.magenta.value,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66), 2, 13, None, NamedColors.cyan.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64), 1, 15, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78), 1, 13, 16, None, None,", "14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6D), 2, 15, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63), 1, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68), 1, 6, None, NamedColors.red.value,", "0x56), 2, 7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C), 1, 12, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "2, 2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47), 2, 3,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 
0x65), 2, 8, None, NamedColors.blue.value,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B), 1, 3, 20, None, None,", "10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D), 1, 11, 24,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68), 2, 6, None, NamedColors.red.value, None, None)", "1, 14, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78), 1, 15,", "0x48), 1, 14, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F), 2, 9, 28, None, None, TextDecorationType(underline=True))", "4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B), 2, 5, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52), 2, 3, 4, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75), 2, 10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69), 2, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71), 1, 10, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42), 2, 14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A), 2, 13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 
0x5A),", "1, 2, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42), 1, 3,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70), 1, 8, 0, None,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46), 1, 11, None, NamedColors.cyan.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B), 1, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76), 2, 13, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56),", "2, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47), 2, 12,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43), 1, 12, None, NamedColors.green.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D), 2, 2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48), 1, 3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79), 2, 8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66), 2, 8, None, NamedColors.cyan.value, None,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45), 2, 11, None, NamedColors.blue.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53), 2, 9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73),", "12, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7E), 1, 13, 28,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71), 1, 15, 0, None, None,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A), 2, 1, 20, None,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65), 2, 6, None, NamedColors.blue.value,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F), 2, 10, 28, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53), 1, 7, 4, None, None, TextDecorationType(underline=True))", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5F), 2, 1, 28, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68), 1, 6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48),", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F), 2, 11, None, NamedColors.white.value, FontStyleType.italic,", "1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62), 2, 2, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60), 2, 10, None, NamedColors.white.value, None, None)", "0x75), 2, 4, 8, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55), 2,", "0x46), 1, 12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66), 1,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42), 1, 5, None, NamedColors.green.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79), 1, 6, 16, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61), 2, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68), 1, 13, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B), 1, 11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B),", "1, 7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71), 1, 8,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52), 2, 12, 4, None, None, None)", "2, 1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75), 2, 2,", "7, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C), 1, 8, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58), 2, 9, 16, None, None, None)", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B), 2, 1, 20, None,", "0x44), 1, 5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64), 
1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44), 1, 12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64),", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58), 2, 12, 16, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62), 2, 8, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "2, 5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x70), 2, 6,", "0x7A), 1, 4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A), 1,", "4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50), 2, 5, 0,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69), 1, 4, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "1, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F), 1, 15,", "0x5E), 1, 3, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F), 1, 2, 28, None, None,", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\"\"\"Unit", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51), 1, 11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51),", "1, 4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54), 1, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71), 2, 2, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51),", "1, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65), 1, 15,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70), 1, 13, 0, None, None, None)", "7, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C), 2, 8, 24,", "2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E), 1, 3, 28,", "6, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C), 2, 7, 24,", "0x42), 2, 9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D), 1, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63), 2, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True)) def test_scc_pac_blue(self):", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43), 1, 11, None, NamedColors.green.value, None,", "1, 7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76), 1, 8,", "0x67), 2, 10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E), 1, 9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A), 1, 13, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E), 2, 13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C), 2, 9, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50), 1, 14, 0, None, None,", "2, 3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61), 2, 4,", "1, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D), 1, 10,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57), 2, 3, 12, None, None, TextDecorationType(underline=True))", "0x7D), 1, 15, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D), 2,", "4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x44), 2, 5, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E), 1, 3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6E),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D), 1, 5, 24, None, None,", "1, 2, 20, None, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A), 1, 3,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60), 1, 4, None, NamedColors.white.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74), 1, 13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F), 2, 5, None,", "11, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56), 1, 12, 12,", "0x76), 2, 13, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56), 2,", "0x51), 1, 14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71), 1,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C), 1, 4, 24, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A), 1, 14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "11, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C), 1, 12, 24,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78), 2, 6, 16, None, None,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42), 1, 11, None, NamedColors.green.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73), 2, 2, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A), 2, 10, 20, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D), 2, 6, None, NamedColors.magenta.value, 
None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT", "15, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E), 2, 1, 28,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63), 2, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "2, 11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48), 2, 12,", "0x44), 1, 1, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F), 2, 5, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53), 1, 12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73),", "0x62), 2, 15, None, NamedColors.green.value, None, None) def test_scc_pac_green_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x51), 2, 9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71), 2,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69), 2, 2, None, NamedColors.red.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64), 1, 2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44),", "0x64), 2, 6, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44), 2,", "2020, Sandflow Consulting LLC # # Redistribution and use in", "0x66), 1, 8, None, 
NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46), 1,", "1, 6, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C), 1, 7,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61), 2, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F), 1, 13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "2, 3, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C), 2, 4,", "1, 2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55), 1, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66), 2, 4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46),", "1, 5, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74), 1, 6,", "13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x45), 2, 14, None,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79), 2, 8, 16, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52), 2, 7, 4, None, None,", "0x7C), 1, 10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E), 1, 9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E),", "10, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51), 
2, 11, 0,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F), 2, 15, 28, None,", "1, 3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F), 1, 4,", "0x4D), 2, 5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77), 1, 2, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D), 1, 1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D),", "2, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D), 2, 4,", "# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E), 2, 14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F), 1, 11, 28, None, None, TextDecorationType(underline=True))", "1, 5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A), 1, 6,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74), 2, 15, 8, None, None, None) def", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C), 1, 8, None, NamedColors.magenta.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C), 2, 3, 24, None, None, None)", "NamedColors.green.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42), 2, 1, None, NamedColors.green.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74), 2, 4, 8, None, None, None)", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70), 1, 15, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51), 1, 12, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71),", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74), 2, 10, 8, None,", "1, 8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A), 1, 9,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56), 1, 11, 12, None,", "4, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_8(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54), 1, 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77), 2, 10, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57),", "1, 4, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E), 1, 5,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C), 1, 7, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E), 1, 5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "0x69), 2, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) def test_scc_pac_yellow(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "1, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63), 1, 2,", "10, None, NamedColors.red.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49), 2, 11, None,", "2, 9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F), 2, 10,", "0x42), 2, 14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62), 2,", "0x5A), 1, 9, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A), 1,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E), 2, 14, None, NamedColors.white.value,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70), 2, 10, 0, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41), 2, 1, None, NamedColors.white.value, None,", "2, 6, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A), 2, 7,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48), 2, 9, None, NamedColors.red.value, None, None)", "def test_scc_pac_indent_12_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57), 1, 1, 12, None, None, TextDecorationType(underline=True))", "2, 5, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63), 2, 6,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75), 1, 4, 8, None,", "6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58), 1, 7, 16,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68), 1, 8, None, NamedColors.red.value, None,", "16, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58), 2, 7, 16, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D), 1, 4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79), 2, 2, 16, None, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68), 1, 10, None, NamedColors.red.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F), 2, 12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "in channel_2_byte_1: for b2 in byte_2_range: pac = SccPreambleAddressCode.find(b1, b2)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F), 1, 1, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F), 1, 6, 28, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53), 1, 3, 4, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70), 1, 8, 0, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50), 2, 9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48), 1, 9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C), 2, 8, 24, None,", "0x67), 2, 13, None, 
NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D), 1, 6, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "0x5B), 2, 12, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B), 2,", "all_range = list(range(0x00, 0XFF)) byte_2_range = range(0x40, 0x80) other_bytes_1 =", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66), 1, 13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F), 1, 3, 28, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48), 2, 3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A), 1, 2, None, NamedColors.yellow.value,", "2, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F), 2, 9,", "0x5B), 2, 14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B), 2,", "5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A), 1, 6, None,", "0x4D), 1, 5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54), 1, 3, 8, None, None,", "0x4D), 2, 11, None, NamedColors.magenta.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E), 1, 11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E),", "1, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65), 1, 6,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A), 2, 8, 20, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B), 1, 13, 20, None, None, TextDecorationType(underline=True))", "pac.get_color()) self.assertEqual(font_style, pac.get_font_style()) self.assertEqual(text_decoration, pac.get_text_decoration()) def test_scc_pac_white(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40), 1,", "1, 14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A), 1, 15,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D), 2, 15, 24, None, None, TextDecorationType(underline=True)) def", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43), 1, 5, None, NamedColors.green.value,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x44), 2, 5, None, NamedColors.blue.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70), 2, 10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F), 2, 8, None,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7E), 1, 13, 28, None,", "2, 
12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B), 2, 13,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46), 2, 14, None, NamedColors.cyan.value, None,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F), 2, 7, None, NamedColors.white.value,", "1, 12, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74), 1, 13,", "2, 2, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A), 2, 3,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C), 2, 1, None, NamedColors.magenta.value, None, None)", "3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66), 1, 4, None,", "2, 14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67), 2, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72), 1, 13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49), 1, 3, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B), 1, 12, 20, None, None,", "13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53), 2, 14, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4C), 1, 3, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C),", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45), 2, 9, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78), 1, 15, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54), 2, 5, 8, None, None,", "11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58), 1, 12, 16,", "2, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50), 2, 3, 0,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52), 2, 11, 4, None,", "6, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52), 2, 7, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51), 2, 12, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72), 1, 15, 4, None,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F), 1, 5, None, NamedColors.white.value,", "0x6F), 2, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) def test_scc_pac_indent_0(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x65), 2, 10, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45), 2,", "test_scc_pac_red_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49), 1, 1, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E), 2, 6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B), 2, 11, None,", "1, 3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D), 1, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43), 1, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "0x42), 2, 3, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62), 2,", "1, 6, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59), 1, 7,", "12, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69), 2, 13, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53), 1, 14, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "15, 12, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_16(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D), 1, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64), 2, 13, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44),", "0x58), 2, 14, 16, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A), 2, 4, 20, None, None, None)", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64), 2, 4, None, NamedColors.blue.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A), 1, 15, None, NamedColors.yellow.value, None, None)", "0x6B), 2, 4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B), 2,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E), 2, 1, None, NamedColors.white.value, FontStyleType.italic, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48), 2, 12, None, NamedColors.red.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x72), 1, 2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73), 1, 6, 4, None,", "1, 7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E), 1, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77), 2, 15, 12, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_16(self):", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C), 1, 8, 24, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69), 2, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49),", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E), 1, 9, None, NamedColors.white.value, FontStyleType.italic, None)", "1, 12, 16, 
None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78), 1, 13,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x59), 1, 12, 16, None,", "0x5D), 1, 5, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61), 2, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) def", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x45), 2, 14, None, NamedColors.blue.value, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52), 1, 3, 4, None, None,", "0x4E), 1, 3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6E), 1,", "0x69), 2, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49), 2,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45), 2, 1, None, NamedColors.blue.value,", "2, 15, 16, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_20(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40), 2, 12, None, NamedColors.white.value, None, None)", "0x73), 1, 8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53), 1,", "0x66), 2, 8, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46), 2,", "0x49), 1, 7, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D), 1, 7, 24, None, None,", "5, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B), 1, 6, 20,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71), 1, 8, 0, None, None, TextDecorationType(underline=True))", "1, 9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E), 1, 10,", "2, 5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64), 2, 6,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58), 1, 12, 16, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40), 2, 1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55), 1, 5, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "0x64), 2, 4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x44), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A), 2, 15, 20, None, None, None)", "1, 11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53), 1, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B), 2, 10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B),", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71), 2, 15, 0, None,", "14, 
None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65), 2, 15, None,", "12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76), 2, 13, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57), 2, 1, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77),", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53), 2, 11, 4, None,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65), 1, 4, None, NamedColors.blue.value, None,", "1, 6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53), 1, 7,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A), 2, 7, None, NamedColors.yellow.value, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48), 2, 9, None, NamedColors.red.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73), 1, 4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5A), 2, 14, 20, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F), 2, 11, 28, None, None,", "13, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A), 2, 14, None,", "6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43), 1, 7, None,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B), 1, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "1, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B), 1, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78), 2, 6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56), 2, 12, 12, None, None, None)", "None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_20(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A), 1, 1, 20,", "1, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F), 1, 9,", "0x57), 2, 9, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77), 2,", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF", "CONTRIBUTORS \"AS IS\" AND # ANY EXPRESS OR IMPLIED WARRANTIES,", "1, 3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x78), 1, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71), 1, 8, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51),", "TextDecorationType(underline=True)) def test_scc_pac_indent_16(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58), 1, 1, 16, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49), 2, 7, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49), 
2, 1, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69),", "3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78), 2, 4, 16,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C), 2, 14, 24, None, None, None)", "2, 1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78), 2, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B), 1, 3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48), 1, 5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68),", "2, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63), 2, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60), 2, 8, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40),", "0x75), 1, 6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55), 1,", "list of conditions and the following disclaimer. # 2. 
Redistributions", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63), 2, 10, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "0x75), 2, 15, 8, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_12(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "2, 2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D), 2, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57), 2, 11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50), 1, 3, 0, None, None,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43), 1, 14, None, NamedColors.green.value, None,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A), 1, 15, None, NamedColors.yellow.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67), 2, 2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71), 2, 6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x51),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68), 2, 6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x50), 2, 1, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70), 2,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 
0x6E), 1, 4, None, NamedColors.white.value,", "0x5A), 2, 3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63), 1, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "2, 12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66), 2, 13,", "self.assertEqual(channel, pac.get_channel()) self.assertEqual(row, pac.get_row()) self.assertEqual(indent, pac.get_indent()) self.assertEqual(color, pac.get_color()) self.assertEqual(font_style, pac.get_font_style())", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45), 2, 11, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING,", "2, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A), 1, 3, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F), 2, 3, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F),", "AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D), 2, 10, 24, None, None, TextDecorationType(underline=True))", "2, 8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53), 2, 9,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D), 1, 6, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D),", "0x56), 2, 11, 12, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D), 2, 9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D),", "8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D), 1, 9, 24,", "1, 6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48), 1, 7,", "2, 7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78), 2, 8,", "2, 2, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B), 2, 3,", "0x5D), 2, 14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D), 2,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A), 1, 5, None, NamedColors.yellow.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60), 2, 8, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, TextDecorationType(underline=True)) def test_scc_pac_indent_16(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58), 1, 1, 16, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60), 1, 15, None, NamedColors.white.value, None,", "0x60), 2, 4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40), 2,", "11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A), 1, 12, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40), 2, 14, None, NamedColors.white.value, None,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76), 1, 15, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70), 1, 4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B), 2, 6, 20, None, None,", "0x64), 1, 4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A), 2, 13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67), 1, 2, None, NamedColors.cyan.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63), 2, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A), 2, 3, None, NamedColors.yellow.value, None,", "0x69), 2, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49), 2,", "2, 10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47), 2, 11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E), 2, 4, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E), 2, 7, 28, None, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A), 2, 1, 20, None, None, 
None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "2, 5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68), 2, 6,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45), 2, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52), 2, 12, 4, None,", "0x68), 2, 15, None, NamedColors.red.value, None, None) def test_scc_pac_red_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75), 2, 6, 8, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70), 2, 8, 0, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x43), 2, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C), 1, 12, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C),", "def test_scc_pac_indent_12(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56), 1, 1, 12, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A), 2, 10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52), 2, 11, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69), 1, 10, None, NamedColors.red.value,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6A), 1, 4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "TextDecorationType(underline=True)) def test_scc_pac_yellow(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A), 1, 1, None, NamedColors.yellow.value, None,", "# 2. Redistributions in binary form must reproduce the above", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42), 1, 11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51), 2, 11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44), 2, 12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D), 2, 10, 24, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50), 2, 5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "above copyright notice, # this list of conditions and the", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64), 2, 15, None, NamedColors.blue.value, None,", "0x65), 1, 6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A), 1, 1, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A),", "0x5F), 2, 1, 28, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F), 2,", "6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42), 1, 7, None,", "IS\" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62), 1, 8, None, NamedColors.green.value, None,", "0x58), 2, 3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75), 2, 2, 8, None, None, TextDecorationType(underline=True))", "2, 6, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D), 2, 7,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F), 2, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F), 2, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "None) def test_scc_pac_blue_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45), 1, 1, None, NamedColors.blue.value, None,", "0x79), 2, 4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59), 2,", "0x48), 1, 3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68), 1,", "0x43), 1, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63), 1,", "2, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63), 2, 13,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F), 2, 15, 28, None, None, TextDecorationType(underline=True)) if", "2, 7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A), 2, 8,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69), 2, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41), 2, 7, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F), 2, 8, 28, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47), 1, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "1, 11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D), 1, 12,", "0x5C), 2, 3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C), 2,", "1, 14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67), 1, 15,", "1, 3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C), 1, 4,", "2, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C), 2, 3, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71), 2, 15, 0, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_4(self):", 
"TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D), 1, 7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47), 2, 9, None,", "0x46), 2, 14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67), 2, 13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "0x6B), 1, 13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B), 1,", "in list(byte_2_range)] for b1 in channel_1_byte_1: for b2 in byte_2_range:", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54), 2, 11, 8, None,", "0x75), 1, 13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62), 1, 13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58), 2, 14, 16, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49), 2, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58), 2, 3, 16, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F), 2, 7, 28, None, None,", 
"TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57), 2, 14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "test_scc_pac_cyan(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x46), 1, 1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "def test_scc_pac_cyan_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47), 1, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "11, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F), 1, 12, None,", "0x52), 2, 9, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72), 2,", "6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53), 2, 7, 4,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55), 1, 12, 8, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A), 2, 11, 20, None, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68), 2, 6, None, NamedColors.red.value, None,", "0x5F), 1, 14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43), 1, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59), 2, 14, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79),", "3, None, NamedColors.magenta.value, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C), 1, 4, None,", "8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59), 1, 9, 16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49), 1, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, None) def test_scc_pac_indent_0_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51), 1, 1, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66), 2, 2, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50), 2, 7, 0, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C), 1, 5, 24, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72), 1, 6, 4, None, None, None)", "1, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B), 1, 15,", "0x40), 1, 14, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A), 1, 9, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A),", "9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67), 2, 10, None,", "5, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74), 1, 6, 8,", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62), 1, 4, None, 
NamedColors.green.value, None, None)", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49), 2, 12, None, NamedColors.red.value,", "0x42), 1, 7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62), 1,", "0x71), 2, 4, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44), 2, 7, None, NamedColors.blue.value, None, None)", "13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52), 1, 14, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51), 1, 14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B), 1, 2, 20,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F), 2, 1, None, NamedColors.white.value,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68), 1, 13, None, NamedColors.red.value,", "0x6E), 2, 8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F), 2, 7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F),", "12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73), 1, 13, 4,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50), 2, 3, 0, None, None,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64), 1, 8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58), 2, 3, 16, None, None, None)", "2, 1, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65), 2, 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42), 1, 7, None, NamedColors.green.value, None, None)", "0x42), 2, 11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42), 2,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51), 1, 3, 0, None,", "2, 11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B), 2, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67), 1, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x47),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52), 2, 7, 4, None, None, None)", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B), 1, 13, 20, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67), 2, 2, None, NamedColors.cyan.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A), 1, 11, None, NamedColors.yellow.value, None, None)", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F), 2, 14, 28, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60), 2, 8, None, 
NamedColors.white.value, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44), 2, 11, None, NamedColors.blue.value,", "0x50), 1, 9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70), 1,", "1, 12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70), 1, 13,", "10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B), 2, 11, 20,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C), 1, 12, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C),", "0x59), 2, 1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79), 2,", "1, 1, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A), 1, 2,", "1, 13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C), 1, 14,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F), 1, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "0x79), 1, 13, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59), 1,", "def test_scc_pac_indent_8(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54), 1, 1, 8, None, None, None)", "5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60), 1, 6, None,", "8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F), 2, 9, 28,", "2, 8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E), 2, 9,", "None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E), 1, 6, 28, None, None,", "1, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61), 1, 6,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A), 1, 14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D), 1, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D),", "3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74), 2, 4, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F), 1, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F),", "12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62), 1, 13, None,", "1, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68), 1, 2, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F), 2, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "1, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41), 1, 14,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75), 2, 4, 8, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77), 2, 4, 12, None, None,", "None, NamedColors.blue.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44), 1, 12, None, NamedColors.blue.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44), 2, 14, None, NamedColors.blue.value, None, None)", "b2)) def check_scc_pac_attributes(self, pac, channel, row, indent, color, font_style, text_decoration):", "1, 8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48), 1, 9,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58), 2, 5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44), 2, 11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60), 2, 2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44), 2, 3, None, NamedColors.blue.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50), 1, 1, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A), 2, 1, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7A),", "4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45), 1, 5, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F), 2, 13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F),", "[item for item in all_range if item not in list(byte_2_range)]", "2, 9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68), 2, 
10,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E), 1, 8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E),", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E), 2, 12, 28, None,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C), 2, 7, None, NamedColors.magenta.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53), 2, 12, 4, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61), 1, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "2, 4, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55), 2, 5,", "12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A), 2, 13, 20,", "TextDecorationType(underline=True)) def test_scc_pac_cyan(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x46), 1, 1, None, NamedColors.cyan.value, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54), 2, 1, 8, None, None,", "0x6B), 2, 13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B), 2,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45), 2, 5, None, NamedColors.blue.value, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61), 2, 6, None, NamedColors.white.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55), 2, 3, 8, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49), 2, 14, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A), 2, 5, 20, None, None,", "3, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75), 1, 4, 8,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73), 1, 13, 4, None,", "6, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F), 2, 7, 28,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B), 2, 7, 20, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58), 1, 9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C), 1, 9, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74), 2, 10, 8, None, None, None)", "1, 14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B), 1, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B), 2, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A), 2, 2, None, NamedColors.yellow.value, None, None)", "1, 5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60), 1, 6,", "0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51), 2, 3, 0, None,", "7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67), 1, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41), 1, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x61),", "11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57), 1, 12, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60), 1, 15, None, NamedColors.white.value, None, None)", "0x66), 1, 15, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46), 2,", "1, 3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6E), 1, 4,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74), 2, 6, 8, None, None,", "13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E), 1, 14, 28,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47), 2, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65), 1, 13, None, NamedColors.blue.value,", "8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45), 1, 9, None,", "10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54), 1, 11, 8,", "0x44), 2, 1, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 
0x64), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A), 1, 15, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65), 2, 13, None,", "13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E), 2, 14, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D), 2, 5, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D),", "9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70), 1, 10, 0,", "14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A), 2, 15, 20,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66), 1, 15, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46),", "1, 15, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53), 2, 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F), 2, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50), 2, 3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51), 1, 12, 0, None,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E), 2, 15, None, NamedColors.white.value, FontStyleType.italic, None)", "2, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D), 1, 3, 24,", "None, None, 
None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54), 1, 12, 8, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61), 1, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53), 2, 14, 4, None,", "CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A), 2, 7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A),", "2, 6, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52), 2, 7,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55), 2, 11, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50), 1, 11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50),", "13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52), 2, 14, 4,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50), 1, 11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73), 1, 6, 4, None, None,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67), 1, 13, None, NamedColors.cyan.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F), 2, 9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F),", "NamedColors.white.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40), 1, 11, None, NamedColors.white.value, None,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A), 1, 2, None, NamedColors.yellow.value, None,", "0x4A), 2, 12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A), 2,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73), 2, 15, 4, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74), 2, 13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "5, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66), 1, 6, None,", "1, 13, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B), 1, 14,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75), 1, 2, 8, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49), 2, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46), 1, 14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66),", "2, 3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B), 2, 4,", "1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79), 1, 2, 16,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41), 2, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "8, 
None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F), 2, 9, None,", "12, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74), 1, 13, 8,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75), 2, 10, 8, None, None, TextDecorationType(underline=True))", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D), 2, 3, 24, None,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77), 1, 13, 12, None,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C), 2, 8, None, NamedColors.magenta.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69), 2, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) def", "2, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63), 2, 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B), 1, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x62), 1, 15, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B), 2, 8, 20, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79), 2, 4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "1, 5, None, 
NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63), 1, 6,", "0x64), 1, 6, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E), 1, 2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A), 2, 11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77), 2, 15, 12,", "2, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F), 2, 13,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E), 2, 7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A), 1, 8, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E), 2, 12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76), 1, 2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D), 2, 5, 24, None, None,", "0x7D), 1, 2, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40), 2, 7, None, 
NamedColors.white.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77), 1, 8, 12, None, None, TextDecorationType(underline=True))", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D), 2, 8, None, NamedColors.magenta.value,", "0x7F), 2, 10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F), 2,", "12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65), 1, 13, None,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71), 1, 10, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63), 2, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x43),", "0x76), 2, 10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56), 2,", "1, 6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C), 1, 7,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73), 2, 13, 4, None, None, TextDecorationType(underline=True))", "4, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49), 2, 5, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55), 2, 1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75),", "and use in source and binary forms, with or without", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C), 2, 1, 24, None,", "2, 11, None, NamedColors.white.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41), 2, 12,", "0x7A), 1, 8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A), 1,", "3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71), 1, 4, 0,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52), 1, 9, 4, None, None, None)", "b2)) for b1 in other_bytes_1: for b2 in range(0x00, 0xFF):", "2, 7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71), 2, 8,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52), 1, 9, 4, None, None,", "0x47), 2, 14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67), 2,", "IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "def test_scc_pac_magenta(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C), 1, 1, None, NamedColors.magenta.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52), 1, 7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B), 2, 1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E), 2, 7, 28,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D), 2, 9, 24, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C), 1, 6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58), 2, 12, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78),", "14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69), 1, 15, None,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A), 2, 3, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E), 1, 1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61), 2, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70), 1, 8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F), 2, 7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75), 1, 6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77), 1, 6, 12, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D), 1, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "test_scc_pac_indent_20(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A), 1, 1, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x7F), 1, 6, 28, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76), 1, 4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54), 1, 14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62), 1, 4, None, NamedColors.green.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67), 2, 10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47),", "1, 6, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B), 1, 7,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B), 2, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D), 1, 13, 24, None, None,", "the documentation # and/or other materials provided with the distribution.", "1, 6, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F), 1, 7,", "8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F), 1, 9, 28,", "13, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44), 1, 14, None,", "0x7F), 2, 4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F), 2,", "10, None, NamedColors.red.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48), 2, 11, None,", "tests for the SCC PACs\"\"\" # pylint: disable=R0201,C0115,C0116 import unittest", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E), 1, 10, 28, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52), 1, 14, 4, None, None, None)", "0x62), 1, 4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42), 2, 14, None, NamedColors.green.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54), 1, 5, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60), 2, 4, None, NamedColors.white.value, None, None)", "2, 7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F), 2, 8,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76), 2, 15, 12, None, None, None)", "1, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49), 1, 7,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A), 1, 9, 20, None,", "1, 3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71), 1, 4,", "8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52), 2, 9, 4,", "4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C), 1, 5, 24,", "0x6A), 2, 8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A), 2,", "0x6C), 2, 8, None, NamedColors.magenta.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B), 1, 13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48), 1, 3, None, NamedColors.red.value, None, None)", "9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64), 1, 10, None,", "the above copyright notice, # this list of conditions and", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41), 2, 7, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "0x47), 2, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67), 2,", "0x44), 2, 5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64), 2,", "11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55), 1, 12, 8,", "0x4C), 2, 11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C), 2,", "6, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44), 1, 7, None,", "1, 8, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C), 1, 9,", "5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C), 2, 6, 24,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75), 2, 15, 8, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55), 2, 7, 8, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75),", "0x62), 1, 2, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42), 1,", "2, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B), 2, 8,", "0x52), 1, 7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C), 1, 13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74), 2, 6, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54),", "0x74), 2, 2, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C), 2, 12, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "7, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61), 2, 8, None,", "0x68), 2, 13, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x51), 2, 7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71),", "9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67), 1, 10, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58), 2, 14, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B), 2, 4, None, 
NamedColors.yellow.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B), 2, 3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "not in list(byte_2_range)] for b1 in channel_1_byte_1: for b2 in", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B), 1, 9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54), 2, 9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69), 2, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "0x46), 2, 11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52), 1, 12, 4, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60), 2, 13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63), 2, 10, None,", "8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B), 1, 9, 20,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79), 1, 13, 16, None, None,", "2, 15, 4, None, None, None) def test_scc_pac_indent_4_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53),", "1, 1, None, NamedColors.red.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68), 1, 2,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59), 2, 5, 16, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77), 1, 4, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A), 1, 13, None, NamedColors.yellow.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C), 2, 13, 24, None, None, None)", "None) def test_scc_pac_indent_28_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F), 1, 1, 28, None, None,", "2, 14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72), 2, 15,", "9, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77), 2, 10, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B), 1, 1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B),", "2, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54), 1, 3, 8,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A), 1, 4, 20, None,", "and the following disclaimer. # 2. 
Redistributions in binary form", "7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68), 1, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B), 2, 4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B),", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55), 1, 14, 8, None,", "and item not in channel_2_byte_1] other_bytes_2 = [item for item", "15, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42), 2, 1, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55), 1, 3, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75),", "0x57), 2, 1, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77), 2,", "6, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A), 2, 7, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60), 2, 10, None, NamedColors.white.value, None,", "1, 13, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56), 1, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41), 2, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61),", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D), 2, 12, None, NamedColors.magenta.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71), 2, 4, 0, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B), 2, 7, None,", "0x79), 1, 8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74), 1, 10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F), 1, 12, 28, None, None,", "0x5E), 1, 14, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51), 1, 5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46), 2, 7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "3, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62), 1, 4, None,", "2, 12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F), 2, 13,", "0x73), 2, 10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53), 2,", "2, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B), 2, 7,", "5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E), 2, 6, None,", "13, None, NamedColors.red.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48), 2, 14, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68), 2, 10, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48),", "10, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59), 2, 11, 16,", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND #", "0x6F), 2, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F), 2,", "6, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59), 2, 7, 16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x78), 1, 4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58),", "LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70), 1, 4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x50),", "0x7D), 2, 13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D), 2,", "4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C), 1, 5, None,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) def test_scc_pac_indent_0(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50), 1, 1, 0,", "1, 8, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40), 1, 9,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C), 2, 6, 24, None,", "0x5C), 1, 3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C), 1,", "7, None, 
NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60), 2, 8, None,", "0x6F), 1, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79), 2, 10, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A), 1, 2, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E), 2, 14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51), 2, 9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71),", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40), 1, 12, None, NamedColors.white.value,", "0x5C), 2, 5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C), 2,", "2, 4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54), 2, 5,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42), 1, 9, None, NamedColors.green.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E), 1, 6, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57), 2, 7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66), 1, 2, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72), 2, 2, 4, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47), 2, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71), 2, 6, 0, None,", "0x76), 1, 10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D), 1, 6, 24, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74), 1, 8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54),", "2, 14, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78), 2, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42), 1, 7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45), 1, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E), 2, 13, None, NamedColors.white.value, FontStyleType.italic,", "0x6D), 2, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67), 2, 10, None, 
NamedColors.cyan.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71), 1, 4, 0, None, None, TextDecorationType(underline=True))", "item not in list(byte_2_range)] for b1 in channel_1_byte_1: for b2", "11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47), 2, 12, None,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F), 2, 13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "def test_scc_pac_white_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41), 1, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A), 2, 9, None, NamedColors.yellow.value,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67), 2, 13, None, NamedColors.cyan.value, None,", "2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48), 2, 3, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61), 2, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62), 2, 13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42),", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D), 2, 2, None, NamedColors.magenta.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45), 1, 
11, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C), 1, 9, 24,", "8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43), 2, 9, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E), 1, 13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B), 1, 2, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B),", "1, 10, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58), 1, 11,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47), 1, 12, None, NamedColors.cyan.value, None,", "0x6F), 2, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F), 2,", "12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B), 1, 13, None,", "9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6A), 1, 10, None,", "3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B), 1, 4, 20,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69), 1, 13, None, NamedColors.red.value,", "None, None) def test_scc_pac_indent_8_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55), 1, 1, 8, None,", "11, 20, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A), 2, 12, 20,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B), 2, 13, None, NamedColors.yellow.value,", "2, 11, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52), 2, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72), 2, 13, 4, None, None, None)", "0x59), 2, 5, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B), 1, 15, 20, None, None, TextDecorationType(underline=True))", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4B), 1, 7, None, NamedColors.yellow.value,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40), 2, 14, None, NamedColors.white.value,", "0x5E), 2, 11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E), 2,", "0x65), 2, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45), 2,", "0x56), 1, 7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76), 1,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F), 1, 6, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65), 2, 6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x45),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E), 2, 6, None, 
NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44), 2, 1, None, NamedColors.blue.value, None, None)", "0x4A), 2, 3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53), 2, 14, 4, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45), 1, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "1, 10, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43), 1, 11,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59), 2, 7, 16, None, None,", "TextDecorationType(underline=True)) def test_scc_pac_indent_0(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50), 1, 1, 0, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E), 1, 6, 28, None, None, None)", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F), 2, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46), 2, 12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66),", "10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47), 1, 11, None,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77), 
2, 13, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59), 1, 9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79),", "0x54), 1, 12, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74), 1,", "1, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67), 1, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F), 1, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47), 2, 1, None, NamedColors.cyan.value, None,", "14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B), 1, 15, None,", "0x70), 2, 8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A), 1, 15, 20, None, None,", "1, 7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F), 1, 8,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C), 2, 9, None, NamedColors.magenta.value, None, None)", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54), 1, 3, 8, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A), 1, 2, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73), 1, 2, 4, None, None, TextDecorationType(underline=True))", "None, 
NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62), 2, 13, None, NamedColors.green.value,", "DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE", "0x5E), 2, 9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A), 2, 6, 20, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51), 2, 3, 0, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51), 2, 14, 0, None, None, TextDecorationType(underline=True))", "6, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E), 2, 7, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D), 2, 7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D),", "0x4D), 1, 7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D), 1,", "0x77), 1, 15, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57), 2,", "case self.assertIsNone(pac) else: self.assertIsNotNone(pac) for b2 in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2))", "4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43), 2, 5, None,", "0x50), 1, 12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70), 1,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6A), 1, 4, None, NamedColors.yellow.value,", "1, 11, None, NamedColors.white.value, 
FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F), 1, 12,", "0x43), 1, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63), 1,", "13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x43), 2, 14, None,", "2, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65), 2, 6,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58), 2, 14, 16, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74), 2, 10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54),", "1, 9, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77), 1, 10,", "channel_1_byte_1 and item not in channel_2_byte_1] other_bytes_2 = [item for", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78), 2, 4, 16, None, None,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F), 1, 11, None, NamedColors.white.value, FontStyleType.italic,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D), 2, 9, 24, None, None,", "12, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C), 2, 13, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46), 2, 11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D), 1, 2, None, 
NamedColors.magenta.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56), 1, 7, 12, None, None, None)", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D), 2, 5, None, NamedColors.magenta.value,", "2, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4C), 1, 3, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62), 2, 6, None, NamedColors.green.value, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C), 2, 9, 24, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53), 1, 14, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73),", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C), 1, 15, None, NamedColors.magenta.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45), 2, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75), 2, 10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55),", "0x48), 1, 7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B), 2, 13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72), 1, 8, 4, None, None, None)", "20, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B), 1, 10, 20, None,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40), 1, 14, None, NamedColors.white.value,", "1, 11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46), 1, 12,", "14, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60), 1, 15, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B), 2, 3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B),", "0x78), 2, 8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58), 2,", "1, 2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44), 1, 3,", "COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44), 2, 9, None, NamedColors.blue.value, None,", "0x4E), 2, 14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E), 2,", "0x50), 2, 7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D), 1, 7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D), 2, 11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "1, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43), 1, 3,", "1, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49), 1, 3,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C), 2, 14, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72), 1, 8, 4, None, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48), 2, 14, None, NamedColors.red.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C), 1, 5, None, NamedColors.magenta.value, None, None)", "0x5D), 1, 3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41), 1, 11, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "1, 1, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64), 1, 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77), 2, 15, 12, None, None,", "14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D), 1, 15, 24,", "13, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B), 1, 14, 20,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69), 2, 4, None, NamedColors.red.value, None,", "2, 1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C), 2, 2,", "if item not in list(byte_2_range)] for b1 in 
channel_1_byte_1: for", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E), 1, 8, None, NamedColors.white.value, FontStyleType.italic,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46), 1, 14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x5A), 1, 11, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54), 1, 9, 8, None, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60), 2, 15, None, NamedColors.white.value, None, None) def", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B), 1, 5, None, NamedColors.yellow.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B), 1, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4B),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70), 2, 8, 0, None, None, None)", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F), 1, 8, 28, None,", "1, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61), 1, 13,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E), 1, 14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A), 1, 7, 20, None, None, None)", "0x42), 1, 9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62), 1,", "4, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53), 2, 9, 4, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43), 1, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44), 2, 3, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72), 1, 15, 4, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58), 1, 7, 16, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A), 1, 15, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "0x68), 1, 4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48), 1,", "1, 4, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57), 1, 5,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49), 2, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "15, 24, None, None, None) def test_scc_pac_indent_24_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C), 1, 11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C),", "14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66), 1, 15, None,", "7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78), 2, 8, 16,", "1, 5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F), 1, 6,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A), 2, 12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D), 1, 4, 24,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47), 2, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C), 2, 8, 24, None, None,", "0x41), 1, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61), 1,", "0x56), 1, 3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76), 1,", "3, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69), 1, 4, None,", "0x6C), 2, 10, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70), 1, 2, 0, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x45), 2, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50), 2, 12, 0,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64), 2, 6, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x4E), 2, 5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E), 2,", 
"None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58), 1, 11, 16, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59), 1, 14, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79),", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61), 1, 6, None, NamedColors.white.value,", "2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F), 1, 3, None,", "NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B), 2, 1, 20, None, None, TextDecorationType(underline=True))", "6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58), 2, 7, 16,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65), 2, 4, None, NamedColors.blue.value,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B), 2, 6, 20, None,", "2, 4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A), 2, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F), 1, 11, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F),", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43), 1, 3, None, NamedColors.green.value,", "0x6B), 1, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B), 1,", "6, None, 
NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44), 2, 7, None,", "15, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48), 2, 1, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C), 2, 2, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50), 1, 11, 0, None,", "1, 1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79), 1, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59), 2, 5, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79),", "2, 14, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60), 2, 15,", "0x46), 1, 14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66), 1,", "1, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41), 2, 1,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72), 2, 6, 4, None,", "2, 4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59), 2, 5,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D), 2, 13, None, NamedColors.magenta.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65), 2, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) def test_scc_pac_cyan(self):", "0x54), 2, 9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74), 2,", "2, 15, None, 
NamedColors.cyan.value, None, None) def test_scc_pac_cyan_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51), 2, 5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71),", "def test_scc_pac_red(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48), 1, 1, None, NamedColors.red.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53), 2, 1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74), 2, 8, 8, None, None, None)", "2, 14, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68), 2, 15,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D), 2, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48), 2, 14, None, NamedColors.red.value, None, None)", "0x54), 1, 11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E), 2, 9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F), 1, 11, 28,", "7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F), 2, 8, 28,", "OF THE POSSIBILITY OF SUCH DAMAGE. 
\"\"\"Unit tests for the", "8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53), 1, 9, 4,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56), 1, 12, 12, None, None,", "0x43), 1, 5, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63), 1,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49), 2, 3, None, NamedColors.red.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75), 1, 10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55),", "0x47), 1, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x44), 2, 5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F), 1, 9, None, NamedColors.white.value, FontStyleType.italic,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42), 1, 12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64), 1, 4, None, NamedColors.blue.value,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64), 1, 15, None, NamedColors.blue.value, None,", "1, 11, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41), 1, 12,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58), 2, 3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48), 2, 11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48),", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49), 2, 11, None, NamedColors.red.value,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74), 1, 13, 8, None,", "0x5D), 2, 3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D), 2,", "1, 7, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C), 1, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E), 1, 3, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75), 1, 6, 8, None, None,", "2, 12, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74), 2, 13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72), 2, 6, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52),", "13, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51), 2, 14, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5F), 2, 1, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F),", "test_scc_pac_cyan_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47), 1, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "2, 
9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71), 2, 10,", "0x48), 2, 14, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49), 2, 12, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48), 2, 5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58), 1, 11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "1, 11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E), 1, 12,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A), 2, 7, 20, None, None,", "2, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50), 1, 3, 0,", "2, 11, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C), 2, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40), 2, 7, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60),", "# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45), 2, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B), 1, 11, 20,", "0x1B, 0x1C] all_range = list(range(0x00, 0XFF)) byte_2_range = range(0x40, 0x80)", "2, 2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48), 2, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48), 2, 3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68),", "0x63), 2, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42), 1, 12, None, NamedColors.green.value, None, None)", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x45), 2, 14, None, NamedColors.blue.value,", "0x4D), 2, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65), 1, 10, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45),", "1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72), 2, 2, 4,", "2, 14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77), 2, 15,", "0x46), 2, 3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66), 2,", "None, None, 
TextDecorationType(underline=True)) def test_scc_pac_indent_24(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C), 1, 1, 24,", "1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A), 1, 2, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51), 2, 5, 0, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A), 2, 3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77), 2, 10, 12, None, None,", "0x51), 2, 12, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71), 2,", "for b1 in channel_1_byte_1: for b2 in byte_2_range: pac =", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55), 2, 12, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75),", "in source and binary forms, with or without # modification,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64), 1, 8, None, NamedColors.blue.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B), 2, 2, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50), 2, 1, 0, None, None,", "0x60), 1, 13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B), 1, 4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54), 1, 9, 8, None, None,", "0x45), 2, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63), 1, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43),", "8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47), 1, 9, None,", "1, 12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79), 1, 13,", "0x6D), 1, 10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D), 1,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D), 1, 2, None, NamedColors.magenta.value, None,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70), 1, 2, 0, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65), 2, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) def", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D), 2, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73), 2, 6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57), 2, 5, 12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77),", "0x4C), 1, 11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C), 1,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76), 1, 10, 12, None,", "1, 8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59), 1, 9,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A), 2, 15, 20, None,", "0x4B), 1, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B), 1,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64), 1, 10, None, NamedColors.blue.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74), 1, 6, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54), 1, 5, 8, None,", "7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A), 1, 8, 20,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A), 1, 14, None, NamedColors.yellow.value,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C), 1, 8, None, NamedColors.magenta.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64), 1, 2, None, NamedColors.blue.value, None, None)", "1, 10, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45), 1, 11,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72), 2, 15, 4, None, None,", "1, 11, 16, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x59), 1, 12,", "2, 12, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D), 2, 13,", "2, 8, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46), 2, 9,", "5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C), 1, 6, 24,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68), 2, 10, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D), 2, 4, 24,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43), 2, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58), 2, 1, 16, None, None,", "0x47), 2, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48), 2, 11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x50), 1, 5, 0, None, None, None)", "0x67), 1, 4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47), 1,", "14, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73), 2, 15, 4,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E), 1, 7, 28, None,", "10, None, NamedColors.magenta.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C), 1, 11, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A), 2, 6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A),", "1, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F), 1, 5,", "4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52), 1, 5, 4,", "1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67), 1, 2, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74), 1, 2, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73), 2, 4, 4, None, None,", "with or without # modification, are permitted provided that the", "0x52), 1, 1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x72), 1,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53), 1, 7, 4, None,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B), 1, 4, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E), 1, 7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49), 2, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69),", "1, 5, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66), 1, 6,", "12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57), 2, 7, 12, None,", "0x5A), 1, 3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A), 1,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A), 1, 11, None, NamedColors.yellow.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E), 2, 6, 28, None, None, None)", "2, 7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A), 2, 8,", "15, None, NamedColors.white.value, FontStyleType.italic, None) def test_scc_pac_white_italics_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F), 1,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77), 1, 6, 12, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D), 2, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C), 1, 13, 24, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68), 2, 15, None, NamedColors.red.value, None, None) def", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76), 1, 8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A), 2, 5, 20, None, None, None)", "0x41), 2, 3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61), 2,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54), 2, 12, 8, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 
0x51), 2, 7, 0, None, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78), 1, 13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x79), 2, 13, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B), 2, 12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B),", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6A), 1, 4, None, NamedColors.yellow.value, None,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53), 1, 3, 4, None,", "0x5B), 2, 7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51), 2, 1, 0, None, None,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C), 1, 15, 24, None,", "0x7E), 2, 6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79), 1, 4, 16, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40), 1, 14, None, NamedColors.white.value, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59), 1, 9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C), 2, 2, 
None, NamedColors.magenta.value,", "2, 10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D), 2, 11,", "test_scc_pac_indent_16(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58), 1, 1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50), 1, 9, 0, None, None,", "2, 9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70), 2, 10,", "3, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60), 1, 4, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73), 2, 6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F), 2, 3, 28, None, None,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C), 2, 2, None, NamedColors.magenta.value, None,", "row, indent, color, font_style, text_decoration): self.assertEqual(channel, pac.get_channel()) self.assertEqual(row, pac.get_row()) self.assertEqual(indent,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50), 2, 11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71), 1, 8, 0, None,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60), 1, 2, None, NamedColors.white.value,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F), 2, 10, None, 
NamedColors.white.value, FontStyleType.italic,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F), 1, 13, 28, None, None,", "2, 14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D), 2, 15,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41), 1, 14, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74), 1, 10, 8, None,", "2, 4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56), 2, 5,", "are met: # # 1. Redistributions of source code must", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A), 1, 5, None, NamedColors.yellow.value, None,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E), 1, 8, None, NamedColors.white.value, FontStyleType.italic, None)", "0x69), 2, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49), 2,", "0x62), 1, 13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45), 1, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E), 2, 8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41), 2, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "1, 9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E), 1, 10,", "2, 10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42), 2, 11,", "5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C), 2, 6, None,", "0x59), 1, 14, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79), 1,", "1, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65), 2, 2, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50), 2, 14, 0, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43), 1, 3, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46), 1, 12, None, NamedColors.cyan.value, None, None)", "test_scc_pac_indent_0(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50), 1, 1, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57), 1, 5, 12, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77), 2, 2, 12, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57), 1, 7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61), 1, 4, 
None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E), 2, 11, None, NamedColors.white.value, FontStyleType.italic,", "15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49), 2, 1, None,", "13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C), 1, 14, None,", "11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D), 1, 12, 24,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65), 2, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x45),", "1, 11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48), 1, 12,", "2, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F), 2, 3,", "2, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) def test_scc_pac_white_italics(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52), 1, 5, 4, None, None, None)", "1, 12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A), 1, 13,", "2, 8, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40), 2, 9,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57), 1, 3, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58), 1, 14, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D), 2, 7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57), 1, 14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77),", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D), 1, 9, None, NamedColors.magenta.value, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41), 1, 3, None, NamedColors.white.value, None,", "LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63), 1, 13, None, NamedColors.green.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78), 2, 8, 16, None, None, None)", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63), 1, 6, None, NamedColors.green.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65), 1, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45),", "14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C), 2, 15, 24,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66), 1, 6, None, NamedColors.cyan.value,", "0x54), 2, 11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54), 2,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E), 1, 6, 28, None,", "2, 10, 16, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58), 2, 11,", "1, 13, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51), 1, 14,", "0x73), 2, 6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53), 2,", "for b1 in channel_2_byte_1: for b2 in byte_2_range: pac =", "1, 10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47), 1, 11,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53), 2, 1, 4, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A), 1, 8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57), 2, 7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77),", "conditions are met: # # 1. 
Redistributions of source code", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41), 2, 5, None, NamedColors.white.value,", "0x59), 2, 12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79), 2,", "1, 1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75), 1, 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B), 1, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "[0x19, 0x1A, 0x1D, 0x1E, 0x1F, 0x18, 0x1B, 0x1C] all_range =", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47), 2, 5, None, NamedColors.cyan.value,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56), 2, 1, 12, None,", "7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D), 2, 8, 24,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x51), 2, 7, 0, None,", "5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65), 1, 6, None,", "2, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x65), 2, 8,", "1, 12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73), 1, 13,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D), 2, 15, 24, None, None,", "0x79), 2, 15, 16, None, None, TextDecorationType(underline=True)) def 
test_scc_pac_indent_20(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77), 2, 8, 12, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43), 1, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E), 2, 12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B), 2, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C), 2, 12, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F), 1, 2, 28, None,", "1, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63), 1, 10,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45), 2, 3, None, NamedColors.blue.value, None,", "channel_1_byte_1 = [0x11, 0x12, 0x15, 0x16, 0x17, 0x10, 0x13, 0x14]", "0x57), 2, 14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77), 2,", "9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62), 2, 10, None,", "0x6B), 2, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B), 2,", "4, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E), 2, 5, 28,", "10, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44), 2, 11, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D), 2, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D),", "BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62), 1, 13, None, NamedColors.green.value,", "1, 3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66), 1, 4,", "NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65), 2, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62), 1, 10, None, NamedColors.green.value, None, None)", "0x77), 2, 8, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49), 1, 12, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F), 1, 6, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56), 1, 11, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x50), 1, 5, 0, None, None,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42), 1, 3, None, NamedColors.green.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71), 1, 2, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D), 2, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46), 1, 9, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E), 1, 15, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E),", "5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68), 2, 6, None,", "3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F), 2, 4, None,", "9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73), 1, 10, 4,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41), 2, 3, None, NamedColors.white.value, None,", "13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F), 2, 14, 28,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41), 2, 14, None, NamedColors.white.value, None,", "None, NamedColors.yellow.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A), 1, 9, None, NamedColors.yellow.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D), 1, 11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D),", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B), 2, 8, None, NamedColors.yellow.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63), 1, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71), 2, 13, 0, None, None, TextDecorationType(underline=True))", "1, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D), 1, 5,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F), 1, 13, None, NamedColors.white.value, FontStyleType.italic,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74), 1, 13, 8, None, None, None)", "0x5D), 1, 1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46), 1, 3, None, NamedColors.cyan.value, None, None)", "1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76), 2, 2, 12,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56), 2, 9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E), 1, 2, 28, None, None, 
None)", "0x45), 2, 1, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55), 1, 12, 8, None, None,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A), 2, 9, 20, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40), 2, 5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78), 1, 8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75), 1, 13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D), 1, 6, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C), 2, 15, 24, None, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76), 1, 2, 12, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48), 1, 1, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68),", "1, 8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E), 1, 9,", "0x40), 2, 5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60), 2,", "0x4B), 1, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B), 1,", "None, 
None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74), 2, 15, 8, None, None, None)", "13, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56), 1, 14, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46), 1, 3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66),", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57), 2, 1, 12, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73), 2, 10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D), 2, 12, 24, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65), 2, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45),", "13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46), 1, 14, None,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F), 2, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E), 2, 7, 28, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47), 2, 3, None, NamedColors.cyan.value, None,", "1, 14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77), 1, 15,", "1, 14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F), 1, 
15,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77), 2, 13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72), 1, 15, 4,", "2, 11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50), 2, 12,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68), 1, 10, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C), 2, 5, None, NamedColors.magenta.value, None, None)", "0x46), 1, 5, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A), 1, 4, 20, None, None,", "12, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68), 2, 13, None,", "2, 14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E), 2, 15,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D), 1, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48), 1, 12, None, NamedColors.red.value,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D), 1, 15, None, NamedColors.magenta.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62), 2, 15, None, NamedColors.green.value, None, None) def", "13, 16, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59), 2, 14, 16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A), 2, 9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F), 1, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D), 2, 2, 24,", "0x75), 1, 4, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55), 1,", "13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D), 2, 14, None,", "0x7A), 2, 8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A), 2,", "5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70), 1, 6, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B), 1, 13, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B),", "other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in channel_2_byte_1: for b2 in", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75), 2, 6, 8, None, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48), 2, 1, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72), 2, 15, 4, None, None, None) def test_scc_pac_indent_4_underline(self):", "16, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78), 2, 8, 16, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C), 1, 7, 24, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52), 1, 11, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52),", "def test_scc_pac_magenta_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D), 1, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74), 1, 10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62), 1, 15, None, NamedColors.green.value, None, None)", "2, 13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x54), 2, 14,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64), 2, 13, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E), 2, 3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "1, 1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78), 1, 2,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D), 2, 5, None, NamedColors.magenta.value, None,", "1, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45), 1, 9,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41), 2, 1, None, NamedColors.white.value,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44), 1, 5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D), 1, 10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "1, 7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64), 1, 8,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45), 1, 11, None, NamedColors.blue.value, None,", "0x66), 2, 6, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46), 2,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46), 1, 3, None, NamedColors.cyan.value, None,", "10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40), 1, 11, None,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67), 2, 4, None, NamedColors.cyan.value,", "in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in other_bytes_1: for b2", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C), 2, 12, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C),", "2, 11, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59), 2, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E), 2, 12, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B), 1, 4, 20, None, None,", "3, 28, None, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E), 2, 4, 28,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A), 2, 7, 20, None,", "0x77), 2, 6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57), 2,", "2, 14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66), 2, 15,", "4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40), 2, 5, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45), 2, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65),", "11, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59), 2, 12, 16,", "0x61), 2, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41), 2,", "2, 5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72), 2, 6,", "2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43), 1, 3, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56), 1, 9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73), 2, 10, 4, None, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61), 2, 13, None, NamedColors.white.value, None,", "1, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49), 1, 9,", 
"NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42), 2, 14, None, NamedColors.green.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A), 2, 15, None, NamedColors.yellow.value, None, None)", "0x59), 2, 3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79), 2,", "0x44), 1, 11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A), 1, 7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E), 1, 14, 28, None, None,", "def test_scc_pac_indent_0(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50), 1, 1, 0, None, None, None)", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65), 2, 2, None, NamedColors.blue.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57), 1, 9, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57), 2, 9, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77),", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61), 2, 2, None, NamedColors.white.value, None,", "13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58), 2, 14, 16,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41), 1, 3, None, NamedColors.white.value, None, 
TextDecorationType(underline=True))", "0x4E), 2, 9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F), 2, 5, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44), 2, 7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72), 2, 8, 4,", "2, 3, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73), 2, 4,", "ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66), 2, 6, None, NamedColors.cyan.value, None, None)", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67), 2, 15, None, NamedColors.cyan.value, None,", "12, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75), 1, 13, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43), 2, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78), 2, 4, 16, None, None, None)", "15, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D), 2, 1, 24,", "0x61), 2, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41), 2,", "0x74), 1, 6, 8, None, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54), 1,", "2, 14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A), 2, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B), 1, 10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B),", "12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70), 1, 13, 0,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F), 1, 9, 28, None, None,", "1, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71), 1, 2, 0,", "0x4E), 2, 3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48), 2, 9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68),", "1, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47), 1, 12,", "2, 6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x45), 2, 7,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49), 2, 12, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "2, 13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F), 2, 14,", "5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A), 2, 6, 20,", "0x5C), 2, 7, 24, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C), 2,", "1, 8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B), 1, 9,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64), 2, 8, None, NamedColors.blue.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73), 2, 13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C), 2, 5, 24, None, None, None)", "CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73), 2, 4, 4, None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E), 1, 12, None, NamedColors.white.value, FontStyleType.italic,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C), 2, 6, None, NamedColors.magenta.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45), 1, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55), 2, 14, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F), 1, 8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "0x57), 1, 11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57), 1,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E), 1, 5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E),", "2, 5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A), 2, 6,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A), 1, 8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B), 1, 3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42), 1, 9, None, NamedColors.green.value,", "2, 1, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7A), 2, 2,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F), 2, 13, 28, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77), 1, 2, 12, None, None,", "1, 5, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B), 1, 6,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C), 1, 13, 24, None,", "notice, this # list of conditions and the following disclaimer.", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C), 1, 5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53), 1, 12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 
0x61), 1, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "12, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B), 1, 13, 20,", "2, 15, 0, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_4(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52),", "2, 3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70), 2, 4,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B), 1, 12, None, NamedColors.yellow.value,", "14, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76), 1, 15, 12,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41), 2, 3, None, NamedColors.white.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50), 1, 7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79), 1, 10, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44), 1, 9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41), 1, 3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79), 1, 2, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77), 2, 10, 12, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B), 2, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F), 2, 8, 28, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68), 1, 6, None, NamedColors.red.value, None, None)", "0x69), 1, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49), 1,", "TextDecorationType(underline=True)) def test_scc_pac_red(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48), 1, 1, None, NamedColors.red.value, None,", "15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) def test_scc_pac_cyan(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x46), 1,", "2, 3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68), 2, 4,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55), 1, 7, 8, None, None, TextDecorationType(underline=True))", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41), 1, 11, None, NamedColors.white.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42), 2, 1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62),", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42), 2, 5, None, NamedColors.green.value, None,", "6, 28, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F), 1, 7, 28,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E), 1, 10, None, NamedColors.white.value, FontStyleType.italic, None)", "6, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D), 1, 7, 24,", "0x4A), 1, 12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A), 1,", "1, 4, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55), 1, 5,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49), 1, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67), 1, 2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56), 1, 9, 12, None, None, None)", "0x5C), 2, 11, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D), 1, 5, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "2, 3, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60), 2, 4,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79), 2, 15, 16, None, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64), 1, 15, None, NamedColors.blue.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C), 1, 12, None, 
NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "1, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B), 1, 10,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56), 2, 9, 12, None,", "1, 14, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64), 1, 15,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D), 1, 2, 24, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49), 2, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B), 1, 4, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71), 1, 4, 0, None, None,", "3, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73), 2, 4, 4,", "9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68), 2, 10, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70), 2, 10, 0, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E), 1, 9, 28, None, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58), 2, 12, 16, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71), 1, 15, 0, None, None, 
TextDecorationType(underline=True))", "OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY", "0x6A), 1, 8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A), 1,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C), 1, 13, None, NamedColors.magenta.value,", "13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B), 1, 14, None,", "7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63), 2, 8, None,", "0x78), 1, 6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55), 2, 12, 8, None, None, TextDecorationType(underline=True))", "1, 9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76), 1, 10,", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C), 1, 8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "0x7B), 2, 15, 20, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_24(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "2, 7, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61), 2, 8,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C), 2, 2, 24, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E), 2, 3, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57), 1, 9, 12, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52), 2, 9, 4, None, None, None)", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C), 2, 4, None, NamedColors.magenta.value, None,", "0x53), 2, 7, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x73), 2,", "0x48), 1, 12, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47), 1, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "test_scc_pac_indent_8(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54), 1, 1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x71), 2, 6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x51), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49), 2, 12, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73), 2, 10, 4, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44), 1, 14, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F), 2, 7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B), 2, 13, None, 
NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "11, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A), 1, 12, 20,", "3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67), 2, 4, None,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D), 2, 13, 24, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B), 1, 15, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48), 2, 3, None, NamedColors.red.value, None, None)", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79), 2, 6, 16, None, None,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54), 2, 9, 8, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64), 2, 10, None, NamedColors.blue.value, None, None)", "1, 1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76), 1, 2,", "5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64), 1, 6, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44), 1, 14, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78), 2, 6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x43), 2, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43), 2,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68), 2, 6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48),", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F), 2, 4, 28, None,", "0x77), 1, 2, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66), 2, 15, None, NamedColors.cyan.value, None, None) def test_scc_pac_cyan_underline(self):", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40), 1, 11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49), 1, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49),", "2, 10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50), 2, 11,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62), 2, 15, None, NamedColors.green.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78), 2, 2, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58),", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A), 2, 1, None, NamedColors.yellow.value, None,", "2, 11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42), 2, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A), 1, 8, None, NamedColors.yellow.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74), 2, 13, 8, None, None, None)", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B), 2, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) def test_scc_pac_magenta(self):", "1, 10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E), 1, 11,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46), 1, 12, None, NamedColors.cyan.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74), 1, 8, 8, None, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D), 2, 9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A), 2, 10, None, NamedColors.yellow.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55), 2, 11, 8, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47), 1, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77), 2, 8, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57),", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59), 2, 14, 16, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44), 1, 7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64),", "0x1A, 0x1D, 0x1E, 0x1F, 0x18, 0x1B, 0x1C] all_range = list(range(0x00,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C), 2, 11, 24, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C),", "9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F), 2, 10, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F), 1, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60), 1, 13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52), 1, 3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A), 2, 3, 20, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42), 2, 11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42),", "7, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69), 1, 8, None,", "9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D), 2, 10, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A), 1, 2, 20, None, None, None)", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75), 2, 6, 8, None, None,", "1, 15, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56), 2, 1,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F), 1, 11, 28, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53), 2, 1, 
4, None, None,", "0x6F), 2, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F), 2,", "0x4E), 1, 1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E), 1,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50), 2, 3, 0, None,", "4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58), 2, 5, 16,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59), 1, 14, 16, None,", "TextDecorationType(underline=True)) def test_scc_pac_indent_28(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E), 1, 1, 28, None, None,", "1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E), 2, 2, 28,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48), 1, 14, None, NamedColors.red.value, None,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F), 2, 11, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51), 1, 5, 0, None, None, TextDecorationType(underline=True))", "0x66), 2, 4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46), 2,", "1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79), 2, 2, 16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79), 1, 4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B), 1, 12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "0x77), 2, 2, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F), 2, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F),", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65), 1, 2, None, NamedColors.blue.value,", "IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47), 1, 14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74), 2, 6, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x5C), 1, 5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A), 1, 12, None, NamedColors.yellow.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x54), 2, 14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "1, 14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D), 1, 15,", "3, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E), 1, 4, 28,", "ARE # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER", "2, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6D), 2, 15,", "0x77), 2, 10, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57), 2,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B), 1, 8, None, NamedColors.yellow.value, None,", "6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41), 2, 7, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48), 1, 9, None, NamedColors.red.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54), 1, 14, 8, None, None, None)", "2, 2, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46), 2, 3,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47), 1, 11, None, NamedColors.cyan.value,", "2, 9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78), 2, 10,", "11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E), 1, 12, None,", "0x50), 1, 1, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62), 1, 15, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42),", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47), 1, 5, None, NamedColors.cyan.value, None,", "24, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D), 1, 11, 24, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B), 1, 12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57), 1, 7, 12, None,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69), 1, 4, None, NamedColors.red.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54), 2, 7, 8, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65), 2, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45),", "0x4E), 1, 5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43), 1, 3, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D), 1, 4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77), 1, 4, 12, None, None, TextDecorationType(underline=True))", "10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50), 1, 11, 0,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60), 1, 4, None, NamedColors.white.value, None, None)", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72), 1, 4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "test_scc_pac_magenta_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D), 1, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "14, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61), 1, 15, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59), 1, 3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79),", "0x71), 2, 13, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51), 2,", "0x50), 2, 12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70), 2,", "5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65), 2, 6, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A), 1, 9, None, NamedColors.yellow.value, None, None)", "1, 2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47), 1, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49), 2, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69),", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B), 2, 14, 20, None,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56), 2, 3, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43), 2, 3, None, NamedColors.green.value, 
None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57), 2, 1, 12, None, None,", "12, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E), 2, 13, 28,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E), 1, 14, 28, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B), 2, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C), 2, 11, 24, None, None,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F), 2, 6, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B), 2, 11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B),", "0x77), 2, 13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57), 2,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C), 1, 7, 24, None,", "0x71), 1, 8, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51), 1,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69), 2, 4, None, NamedColors.red.value,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D), 1, 4, None, NamedColors.magenta.value,", "2, 4, 28, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E), 2, 5,", "0x46), 2, 1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66), 2,", "15, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A), 2, 1, 20,", "2, 15, None, NamedColors.yellow.value, None, None) def test_scc_pac_yellow_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C), 1, 3, 24, None, None,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56), 2, 5, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57), 2, 3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50), 1, 14, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "1, 5, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D), 1, 6,", "2, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69), 2, 10,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43), 1, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41), 1, 7, None,", "2, 8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54), 2, 9,", "11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F), 1, 12, 28,", "None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C), 2, 13, 24, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73), 1, 15, 4, None, None,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71), 1, 13, 0, None,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79), 2, 10, 16, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x61), 1, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "0x54), 2, 14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D), 1, 8, 24, None, None, TextDecorationType(underline=True))", "test_scc_pac_blue(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44), 1, 1, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "1, 12, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7E), 1, 13,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78), 2, 13, 16, None, None, None)", "8, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C), 2, 9, None,", "1, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A), 1, 2, 20,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78), 2, 10, 16, None,", "5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B), 1, 6, None,", "8, None, None, None) def 
test_scc_pac_indent_8_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55), 1, 1,", "7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B), 2, 8, 20,", "12, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_16(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58), 1, 1,", "3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61), 2, 4, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56), 1, 5, 12, None, None,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C), 1, 10, None, NamedColors.magenta.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C), 2, 2, 24, None, None, None)", "-*- coding: UTF-8 -*- # Copyright (c) 2020, Sandflow Consulting", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E), 2, 13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71), 2, 8, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C), 2, 8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C),", "10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41), 1, 11, None,", "0x17, 0x10, 0x13, 0x14] channel_2_byte_1 = [0x19, 0x1A, 0x1D, 0x1E,", "0x66), 2, 13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46), 2,", "None, NamedColors.white.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40), 2, 1, None, NamedColors.white.value,", "2, 12, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77), 2, 13,", "TextDecorationType(underline=True)) def test_scc_pac_blue(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44), 1, 1, None, NamedColors.blue.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46), 1, 5, None, NamedColors.cyan.value, None, None)", "2, 10, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C), 2, 11,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56), 2, 1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) def check_scc_pac_attributes(self, pac, channel, row, indent, color, font_style,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54), 1, 3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "1, 12, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77), 1, 13,", "0x5F), 2, 7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F), 2,", "1, 6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57), 1, 7,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65), 2, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64), 1, 6, None, NamedColors.blue.value,", "None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53), 1, 3, 4, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53), 2, 5, 4, None, None,", "2, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57), 1, 3, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A), 1, 12, 20, None, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F), 2, 11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59), 2, 9, 16, None,", "1, 4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A), 1, 5,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61), 2, 15, None, NamedColors.white.value, None,", "def test_scc_pac_white_italics(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E), 1, 1, None, NamedColors.white.value, FontStyleType.italic, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E), 2, 11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42), 2, 7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44), 1, 9, None, NamedColors.blue.value,", "0x4A), 2, 1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A), 2,", "2, 14, 12, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76), 2, 15,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59), 1, 11, 16, None, None, TextDecorationType(underline=True))", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A), 2, 11, 20, None,", "2, 11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54), 2, 12,", "4, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49), 1, 5, None,", "1, 4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42), 1, 5,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65), 2, 10, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "0x4F), 2, 5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54), 2, 12, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "0x67), 1, 15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47), 2,", "0x4D), 2, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D), 2,", "0x75), 2, 6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55), 2,", "15, None, NamedColors.cyan.value, None, None) def test_scc_pac_cyan_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47), 1,", "13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42), 2, 14, 
None,", "5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F), 2, 6, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C), 2, 13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C),", "0x7E), 1, 2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69), 2, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74), 1, 6, 8, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69), 2, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "1, 13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55), 1, 14,", "2, 2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44), 2, 3,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46), 2, 1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57), 1, 14, 12, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75), 2, 13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59), 2, 3, 16, None, 
None,", "5, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63), 1, 6, None,", "0x7F), 1, 4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76), 2, 15, 12, None, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68), 1, 2, None, NamedColors.red.value, None,", "2, 6, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44), 2, 7,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F), 2, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73), 1, 10, 4, None, None, TextDecorationType(underline=True))", "0x7F), 1, 15, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5F), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B), 2, 12, 20, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64), 1, 10, None, NamedColors.blue.value, None, None)", "1, 2, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58), 1, 3,", "1, 9, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67), 1, 10,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44), 1, 11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "24, None, 
None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D), 2, 10, 24, None,", "15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) def test_scc_pac_white_italics(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E), 1,", "the distribution. # # THIS SOFTWARE IS PROVIDED BY THE", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71), 1, 6, 0, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62), 2, 2, None, NamedColors.green.value, None, None)", "0x55), 1, 11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53), 2, 9, 4, None, None, TextDecorationType(underline=True))", "7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B), 1, 8, 20,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55), 2, 11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "1, 6, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A), 1, 7,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A), 1, 7, 20, None, None,", "2, 11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F), 2, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47), 2, 14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67),", "2, 4, None, NamedColors.white.value, FontStyleType.italic, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E), 2, 5,", "11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E), 2, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70), 1, 8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50),", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57), 1, 14, 12, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64), 2, 10, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69), 1, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "1, 4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D), 1, 5,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A), 1, 6, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52), 1, 7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49), 2, 1, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59), 2, 12, 16, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D), 2, 5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "0x79), 1, 10, 16, None, 
None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57), 1, 3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77),", "# list of conditions and the following disclaimer. # 2.", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69), 1, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51), 1, 11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74), 1, 10, 8,", "1, 13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42), 1, 14,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D), 2, 9, None, NamedColors.magenta.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56), 1, 12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F), 1, 7, 28, None, None,", "1, 4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44), 1, 5,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E), 1, 12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46), 2, 5, None,", "NamedColors.blue.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45), 1, 5, None, NamedColors.blue.value, None,", "12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73), 2, 13, 4,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70), 2, 8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43), 2, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C), 2, 11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B), 2, 1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55), 2, 5, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72), 2, 8, 4, None,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F), 2, 14, None, NamedColors.white.value,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5A), 2, 14, 20, None,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42), 1, 5, None, NamedColors.green.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56), 2, 7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76),", "0x4B), 2, 11, None, NamedColors.yellow.value, 
None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B), 2,", "import TextDecorationType, NamedColors, FontStyleType class SCCPreambleAddressCodesTest(unittest.TestCase): def test_scc_pac_values(self): channel_1_byte_1 =", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45), 1, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48), 2, 12, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "2, 5, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75), 2, 6,", "14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63), 2, 15, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41), 2, 9, None, NamedColors.white.value, None,", "12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61), 2, 13, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52), 1, 5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65), 1, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40), 1, 11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E), 1, 10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76), 1, 4, 12, None, None,", "def test_scc_pac_green_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43), 1, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70), 1, 6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F), 1, 2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F),", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54), 1, 7, 8, None,", "1, 2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56), 1, 3,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50), 2, 1, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64), 2, 8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54), 2, 11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54),", "14, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64), 2, 15, None,", "6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57), 2, 7, 12,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44), 2, 9, None, NamedColors.blue.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60), 2, 6, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40),", "None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50), 2, 11, 0, None, None, None)", "12, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B), 2, 13, 20,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47), 2, 12, None, NamedColors.cyan.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C), 1, 4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62), 2, 10, None, NamedColors.green.value, None, None)", "0x75), 2, 13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55), 2,", "channel_2_byte_1] other_bytes_2 = [item for item in all_range if item", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40), 1, 3, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A), 2, 8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40), 2, 12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "NamedColors.red.value, None, TextDecorationType(underline=True)) def test_scc_pac_yellow(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A), 1, 1, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51), 1, 14, 0, None, None, TextDecorationType(underline=True))", "0x41), 2, 11, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41), 2,", "NamedColors.white.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40), 1, 5, None, NamedColors.white.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D), 1, 5, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D),", "5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71), 1, 6, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C), 1, 13, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5C),", "0x63), 2, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x43), 2,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E), 2, 7, None, NamedColors.white.value, FontStyleType.italic,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C), 2, 7, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52), 2, 7, 4, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41), 1, 7, None, NamedColors.white.value, None,", "15, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50), 2, 1, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F), 2, 6, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F),", "# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "0x72), 1, 13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68), 1, 2, None, NamedColors.red.value, None, 
None)", "def test_scc_pac_yellow(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A), 1, 1, None, NamedColors.yellow.value, None, None)", "0x62), 2, 8, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42), 2,", "0x43), 1, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C), 1, 2, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A), 2, 12, None, NamedColors.yellow.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A), 1, 5, 20, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45), 2, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58), 2, 11, 16, None, None,", "0x7F), 2, 15, 28, None, None, TextDecorationType(underline=True)) if __name__ ==", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69), 1, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D), 2, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E), 2, 12, None, NamedColors.white.value, FontStyleType.italic,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D), 2, 6, 24, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F), 1, 14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72), 1, 13, 4, None, None, None)", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66), 1, 4, None, NamedColors.cyan.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F), 1, 3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D), 2, 6, 24, None, None, TextDecorationType(underline=True))", "FontStyleType.italic, TextDecorationType(underline=True)) def test_scc_pac_indent_0(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50), 1, 1, 0, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48), 2, 5, None, NamedColors.red.value, None, None)", "0x53), 2, 1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73), 2,", "1, 11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E), 1, 12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B), 2, 14, 20, None, None, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76), 2, 6, 12, None, None,", "1, 2, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54), 1, 3,", "None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76), 1, 15, 12, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75), 2, 6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55),", "0x62), 2, 2, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42), 2,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C), 1, 14, None, NamedColors.magenta.value, None,", "1, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65), 1, 13,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B), 1, 6, None, NamedColors.yellow.value,", "8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56), 2, 9, 12,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47), 2, 11, None, NamedColors.cyan.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C), 2, 7, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C),", "5, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75), 2, 6, 8,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E), 1, 4, 28, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48), 1, 7, None, NamedColors.red.value, None, None)", "must reproduce the above copyright notice, # this list of", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42), 1, 5, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62),", 
"TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B), 1, 3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C), 1, 7, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E), 1, 10, 28, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58), 1, 14, 16, None, None,", "6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A), 2, 7, 20,", "1, 5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68), 1, 6,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F), 2, 4, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46), 1, 7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x66),", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F), 1, 2, None, NamedColors.white.value, FontStyleType.italic,", "0x72), 1, 8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D), 1, 2, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72), 1, 8, 4,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A), 2, 10, None, NamedColors.yellow.value,", "5, None, 
NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62), 1, 6, None,", "0x7F), 1, 10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76), 2, 4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62), 2, 2, None, NamedColors.green.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57), 2, 9, 12, None, None,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46), 1, 11, None, NamedColors.cyan.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68), 1, 8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "1, 6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E), 1, 7,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52), 2, 5, 4, None, None,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E), 1, 15, None, NamedColors.white.value, FontStyleType.italic, None)", "15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D), 2, 1, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48), 1, 3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68),", "0x4D), 2, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D), 2,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53), 1, 7, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79), 2, 6, 16, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D), 2, 7, 24, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61), 2, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46), 1, 9, None, NamedColors.cyan.value, None, None)", "2, 4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53), 2, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A), 2, 1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A),", "7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F), 1, 8, 28,", "20, None, None, None) def test_scc_pac_indent_20_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B), 1, 1,", "of source code must retain the above copyright notice, this", "2, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61), 2, 13,", "0x52), 2, 3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72), 2,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x66), 1, 8, None, NamedColors.cyan.value, None,", "NamedColors.yellow.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B), 2, 13, None, NamedColors.yellow.value, None,", "disclaimer. # 2. Redistributions in binary form must reproduce the", "6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50), 1, 7, 0,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E), 1, 7, None, NamedColors.white.value,", "EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO,", "1, 5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B), 1, 6,", "10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47), 2, 11, None,", "2, 4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52), 2, 5,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43), 1, 5, None, NamedColors.green.value, None,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A), 2, 6, None, NamedColors.yellow.value,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73), 1, 8, 4, None, None,", "1, 9, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C), 1, 10,", "2, 14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71), 2, 15,", "0x42), 1, 12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62), 1,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D), 1, 10, None, 
NamedColors.magenta.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65), 2, 6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A), 2, 12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A),", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x47), 1, 7, None, NamedColors.cyan.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D), 1, 9, 24, None, None,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66), 2, 6, None, NamedColors.cyan.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F), 1, 11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "8, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40), 2, 9, None,", "1, 1, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71), 1, 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71), 1, 2, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48), 2, 5, None, NamedColors.red.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D), 1, 5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71), 2, 13, 0, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79), 2, 10, 16, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D), 1, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "0x73), 2, 13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53), 2,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70), 1, 10, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48), 2, 12, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A), 2, 3, None, NamedColors.yellow.value, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63), 2, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78), 2, 15, 16, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79), 2, 6, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43), 1, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C), 2, 7, None, NamedColors.magenta.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C),", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A), 2, 6, None, NamedColors.yellow.value, None,", "0x42), 2, 1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62), 2,", "0x57), 2, 5, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C), 2, 14, 24, None, None,", "2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45), 1, 3, None,", "0x63), 1, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E), 1, 15, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "0x6C), 1, 4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C), 1,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C), 1, 12, None, NamedColors.magenta.value,", "0x6A), 1, 2, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D), 1, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73), 2, 15, 4, None, None, TextDecorationType(underline=True)) def", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46), 1, 5, None, 
NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "6, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54), 1, 7, 8,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64), 2, 4, None, NamedColors.blue.value,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47), 1, 9, None, NamedColors.cyan.value,", "2, 12, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68), 2, 13,", "10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E), 2, 11, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63), 2, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72), 2, 2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52),", "0x56), 1, 9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76), 1,", "0x40), 2, 14, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47), 2, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B), 1, 13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C), 1, 7, None, NamedColors.magenta.value, None, None)", "0x79), 2, 6, 16, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59), 2,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60), 2, 15, None, NamedColors.white.value,", "1, 7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70), 1, 8,", "15, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57), 2, 1, 12,", "1, 15, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42), 2, 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50), 1, 12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "2, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59), 2, 3, 16,", "1, 14, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76), 1, 15,", "10, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48), 1, 11, None,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B), 2, 6, None, NamedColors.yellow.value, None,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65), 1, 6, None, NamedColors.blue.value,", "1, 7, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79), 1, 8,", "1, 7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A), 1, 8,", "15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47), 2, 1, None,", "2, 14, 16, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79), 2, 15,", "1, 4, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B), 1, 5,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65), 1, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "LLC # # Redistribution and use in source and binary", "4, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57), 1, 5, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53), 1, 5, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B), 1, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A), 2, 9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A),", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E), 2, 13, None, NamedColors.white.value, FontStyleType.italic, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B), 2, 3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59), 2, 5, 16, None, None,", "not in channel_2_byte_1] other_bytes_2 = [item for item in all_range", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F), 1, 3, 28, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B), 1, 15, None, NamedColors.yellow.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71), 2, 15, 0, None, None, TextDecorationType(underline=True))", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68), 1, 8, None, NamedColors.red.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69), 2, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F), 1, 8, None, NamedColors.white.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F), 1, 9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79), 2, 10, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46), 1, 9, None, NamedColors.cyan.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x72), 1, 2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52),", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78), 1, 13, 16, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42), 2, 3, None, NamedColors.green.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52), 1, 3, 4,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78), 1, 6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49), 2, 1, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "0x7B), 2, 4, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B), 2,", "2, 2, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57), 2, 3,", "14, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68), 1, 15, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46), 2, 1, None, NamedColors.cyan.value, None, None)", "0x5A), 2, 12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A), 2,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B), 2, 3, 20, None,", "2, 8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52), 2, 9,", "2, 11, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A), 2, 12,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52), 2, 14, 4, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58), 2, 7, 16, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74), 1, 8, 8, None, None,", "0x72), 1, 15, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52), 2,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A), 2, 2, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58), 1, 5, 16, None, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77), 1, 8, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63), 2, 2, None, NamedColors.green.value, None,", "-*- # Copyright (c) 2020, Sandflow Consulting LLC # #", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C), 2, 8, 24, None, None, None)", "0x5E), 2, 5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76), 1, 10, 12, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E), 1, 6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E),", "0x46), 1, 1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66), 1,", "10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F), 2, 11, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C), 2, 15, None, NamedColors.magenta.value, None, None) def test_scc_pac_magenta_underline(self):", "b2 in range(0x00, 0xFF): self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) def check_scc_pac_attributes(self, pac, channel,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C), 1, 5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", 
"None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76), 2, 2, 12, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C), 1, 5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C),", "0x5A), 2, 7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78), 2, 10, 16, None, None,", "14, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60), 2, 15, None,", "2, 14, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61), 2, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56), 2, 12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76),", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D), 2, 4, None, NamedColors.magenta.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59), 1, 3, 16, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F), 1, 5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F),", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48), 2, 12, None, NamedColors.red.value, None,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63), 2, 15, None, NamedColors.green.value,", "NamedColors, FontStyleType class SCCPreambleAddressCodesTest(unittest.TestCase): def test_scc_pac_values(self): channel_1_byte_1 = [0x11, 0x12,", "1, 10, 20, 
None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A), 1, 11,", "0x47), 1, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47), 1,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A), 1, 11, 20, None,", "0x5D), 1, 14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56), 1, 7, 12, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64), 1, 2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x54), 2, 14, 8,", "1, 13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47), 1, 14,", "None, None) def test_scc_pac_magenta_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D), 1, 1, None, NamedColors.magenta.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F), 1, 6, 28, None, None, TextDecorationType(underline=True))", "= range(0x40, 0x80) other_bytes_1 = [item for item in all_range", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78), 2, 2, 16, None,", "0x41), 1, 11, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41), 1,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B), 2, 7, None, NamedColors.yellow.value,", "2, 8, 0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51), 2, 9,", "b2 > 0x5F and b1 % 0x08 == 0: #", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74), 2, 4, 8, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73), 2, 2, 4, None, None, TextDecorationType(underline=True))", "4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41), 2, 5, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7E), 1, 13, 28, None, None,", "channel_1_byte_1: for b2 in byte_2_range: pac = SccPreambleAddressCode.find(b1, b2) if", "0x57), 2, 12, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64), 2, 13, None, NamedColors.blue.value, None, None)", "2, 15, None, NamedColors.white.value, None, None) def test_scc_pac_white_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41),", "1, 10, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57), 1, 11,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69), 2, 6, None, NamedColors.red.value, None,", "THE POSSIBILITY OF SUCH DAMAGE. 
\"\"\"Unit tests for the SCC", "10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E), 2, 11, 28,", "7, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79), 2, 8, 16,", "5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78), 2, 6, 16,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54), 1, 5, 8, None, None,", "other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in other_bytes_1: for b2 in", "15, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40), 2, 1, None,", "test_scc_pac_indent_16_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59), 1, 1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F), 1, 9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F),", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68), 1, 4, None, NamedColors.red.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E), 1, 4, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69), 2, 8, None, NamedColors.red.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F), 1, 2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "0x6C), 2, 13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C), 2,", "None, 
NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C), 2, 1, None, NamedColors.magenta.value,", "1, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70), 2, 2, 0,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47), 2, 9, None, NamedColors.cyan.value,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F), 1, 15, 28, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C), 1, 11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "1, 3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79), 1, 4,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66), 2, 15, None, NamedColors.cyan.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58), 1, 11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58),", "12, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77), 2, 13, 12,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71), 1, 15, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E), 1, 11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E),", "7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64), 1, 8, None,", "14, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65), 1, 15, None,", "5, 4, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73), 1, 6, 4,", "2, 1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B), 2, 2,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74), 1, 15, 8, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E), 1, 12, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F), 1, 10, 28, None, None,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59), 1, 7, 16, None,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68), 2, 8, None, NamedColors.red.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D), 2, 13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C), 2, 9, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D), 2, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D),", "3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A), 2, 4, None,", "7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E), 2, 8, None,", "2, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61), 2, 6,", "None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71), 2, 6, 0, None, None,", "TextDecorationType(underline=True)) def test_scc_pac_white_italics(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E), 1, 1, None, NamedColors.white.value, FontStyleType.italic,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D), 2, 14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D),", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B), 2, 15, 20, None,", "0x60), 1, 10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40), 1,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49), 1, 7, None, NamedColors.red.value,", "14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D), 2, 15, 24,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57), 2, 11, 12, None, None,", "2, 4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C), 2, 5,", "0x5F), 1, 5, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F), 1,", "1, 7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62), 1, 8,", "0x72), 2, 15, 4, None, None, None) def test_scc_pac_indent_4_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "ARISING IN ANY WAY OUT OF THE USE OF THIS", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65), 1, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45),", "None, 
None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56), 2, 11, 12, None, None, None)", "2, 12, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C), 2, 13,", "HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B), 1, 9, None, NamedColors.yellow.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C), 1, 11, None, NamedColors.magenta.value, None, None)", "2, 15, None, NamedColors.magenta.value, None, None) def test_scc_pac_magenta_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D),", "1, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F), 1, 2,", "1, 7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x66), 1, 8,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65), 2, 13, None, NamedColors.blue.value,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D), 1, 5, None, NamedColors.magenta.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46), 2, 11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D), 1, 14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "def test_scc_pac_indent_16_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59), 1, 1, 16, None, None, TextDecorationType(underline=True))", 
"None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B), 1, 11, None, NamedColors.yellow.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40), 2, 3, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D), 2, 11, 24, None, None,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A), 2, 7, None, NamedColors.yellow.value,", "5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A), 2, 6, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74), 2, 4, 8, None, None,", "3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77), 2, 4, 12,", "0x40), 2, 9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60), 2,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A), 1, 8, None, NamedColors.yellow.value,", "1, 13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40), 1, 14,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50), 1, 12, 0, None, None, None)", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F), 2, 6, 28, None,", "11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A), 2, 12, None,", "10, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43), 2, 11, None,", "1, 4, None, 
NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49), 1, 5,", "9, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A), 2, 10, 20,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D), 1, 10, 24, None, None, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51), 2, 11, 0, None, None,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76), 2, 13, 12, None,", "1, 15, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44), 2, 1,", "INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C), 1, 12, None, NamedColors.magenta.value, None,", "0x6C), 2, 4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66), 1, 10, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62), 2, 10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60), 2, 10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "2, 12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73), 2, 13,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F), 1, 11, None, NamedColors.white.value, FontStyleType.italic, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55), 1, 9, 8, None, None, TextDecorationType(underline=True))", "4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42), 2, 5, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73), 2, 2, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D), 2, 3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D),", "13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E), 2, 14, None,", "0x53), 1, 12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73), 1,", "13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41), 1, 14, None,", "14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74), 2, 15, 8,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69), 1, 15, None, NamedColors.red.value, None,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78), 1, 6, 16, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A), 2, 8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C), 1, 2, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 
0x4C),", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B), 2, 1, None, NamedColors.yellow.value,", "0x65), 1, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63), 1, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69), 1, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49),", "9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76), 2, 10, 12,", "2, 1, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x64), 2, 2,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52), 1, 5, 4, None,", "0x55), 2, 5, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75), 2,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48), 1, 9, None, NamedColors.red.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77), 1, 10, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "2, 12, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C), 2, 13,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58), 2, 7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43), 2, 
11, None, NamedColors.green.value,", "20, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_24(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C), 1, 1,", "1, 2, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D), 1, 3,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D), 1, 2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51), 2, 3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55), 2, 7, 8, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E), 1, 7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57), 2, 14, 12, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58), 1, 5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A), 2, 8, 20, None, None,", "pac.get_text_decoration()) def test_scc_pac_white(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40), 1, 1, None, NamedColors.white.value, None,", "item not in channel_2_byte_1] other_bytes_2 = [item for item in", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55), 2, 1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "other materials provided with the 
distribution. # # THIS SOFTWARE", "0x51), 2, 7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71), 2,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F), 1, 13, 28, None,", "without # modification, are permitted provided that the following conditions", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS", "2, 8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44), 2, 9,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C), 2, 1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C),", "FontStyleType.italic, None) def test_scc_pac_white_italics_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F), 1, 1, None, NamedColors.white.value,", "0x65), 2, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57), 1, 3, 12, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A), 2, 9, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "0x60), 1, 4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73), 1, 13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "ttconv.scc.codes.preambles_address_codes import SccPreambleAddressCode from ttconv.style_properties import TextDecorationType, NamedColors, FontStyleType class", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78), 1, 8, 16, 
None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B), 2, 10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B), 2, 9, None, NamedColors.yellow.value, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76), 1, 10, 12, None, None,", "1, 15, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54), 2, 1,", "0x60), 2, 8, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40), 2,", "SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D), 1, 14, 24, None, None, TextDecorationType(underline=True))", "1, 3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B), 1, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F), 1, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x5D), 2, 7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D), 2,", "6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F), 1, 7, None,", "0x64), 2, 8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77), 2, 4, 12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57),", "= [0x19, 0x1A, 0x1D, 0x1E, 0x1F, 0x18, 0x1B, 0x1C] all_range", "0x1D, 0x1E, 0x1F, 0x18, 0x1B, 0x1C] all_range = list(range(0x00, 0XFF))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66), 2, 8, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46),", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68), 2, 4, None, NamedColors.red.value, None,", "2, 13, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48), 2, 14,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40), 1, 9, None, NamedColors.white.value, None, None)", "0x46), 1, 7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x66), 1,", "1, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68), 2, 2, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42), 1, 14, None, NamedColors.green.value, None, None)", "0x6D), 1, 2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D), 1,", "ANY WAY OUT OF THE USE OF THIS # SOFTWARE,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D), 1, 11, None, NamedColors.magenta.value,", "12, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75), 2, 13, 8,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51), 1, 11, 0, None, None,", "0x5D), 2, 1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D), 2,", "None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55), 1, 9, 8, None, None,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72), 1, 4, 4, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F), 2, 5, 28, None, None,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62), 1, 2, None, NamedColors.green.value,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E), 2, 10, None, NamedColors.white.value, FontStyleType.italic, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70), 1, 10, 0, None, None, None)", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63), 2, 4, None, NamedColors.green.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42), 1, 11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "2, 2, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50), 2, 3,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56), 2, 5, 12, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42), 2, 9, None, NamedColors.green.value, None, None)", "0x6B), 2, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B), 2,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F), 1, 14, None, NamedColors.white.value, FontStyleType.italic,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A), 1, 2, 20, None,", "None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50), 2, 14, 0, None, None, None)", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61), 2, 2, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x54), 2, 14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74),", "0x49), 2, 1, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69), 2,", "2, 8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A), 2, 9,", "1, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45), 1, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41), 1, 11, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70), 1, 6, 0, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50), 2, 7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70),", "3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E), 2, 4, None,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C), 1, 11, None, NamedColors.magenta.value,", "3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65), 1, 4, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F), 1, 14, 28, None, None,", "NamedColors.cyan.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66), 2, 6, None, NamedColors.cyan.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C), 1, 15, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C), 2, 13, None, NamedColors.magenta.value, None,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47), 2, 3, None, NamedColors.cyan.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54), 2, 7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "1, 2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E), 1, 3,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E), 2, 10, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51), 1, 9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54), 2, 9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A), 1, 4, 20, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C), 1, 15, 24, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F), 2, 6, 28, None, None, TextDecorationType(underline=True))", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63), 1, 8, None, 
NamedColors.green.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E), 2, 15, 28, None, None, None) def test_scc_pac_indent_28_underline(self):", "9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69), 2, 10, None,", "0x52), 1, 5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72), 1,", "9, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C), 2, 10, 24,", "for b2 in byte_2_range: pac = SccPreambleAddressCode.find(b1, b2) if b2", "6, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A), 1, 7, None,", "2, 11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E), 2, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58), 2, 11, 16, None, None, None)", "1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75), 1, 2, 8,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59), 2, 9, 16, None, None,", "test_scc_pac_indent_12_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57), 1, 1, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C), 2, 9, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C),", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C), 2, 14, None, NamedColors.magenta.value,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66), 1, 15, None, NamedColors.cyan.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 
0x7E), 1, 13, 28, None, None, None)", "0x5E), 2, 1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E), 2,", "9, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66), 1, 10, None,", "TextDecorationType(underline=True)) def test_scc_pac_indent_24(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C), 1, 1, 24, None, None,", "2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D), 1, 3, None,", "def test_scc_pac_indent_24_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D), 1, 1, 24, None, None, TextDecorationType(underline=True))", "2, 10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C), 2, 11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76), 2, 4, 12, None, None, None)", "4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54), 1, 5, 8,", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND # ANY", "check_scc_pac_attributes(self, pac, channel, row, indent, color, font_style, text_decoration): self.assertEqual(channel, pac.get_channel())", "2, 1, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68), 2, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67), 1, 13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63), 2, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43),", "0x6A), 2, 6, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A), 2,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78), 1, 13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58),", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x54), 2, 14, 8, None,", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \"\"\"Unit tests", "0x78), 2, 15, 16, None, None, None) def test_scc_pac_indent_16_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C), 1, 14, None, NamedColors.magenta.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68), 1, 4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48),", "0x50), 1, 11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50), 1,", "None) def test_scc_pac_indent_4_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53), 1, 1, 4, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A), 2, 5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A),", "0x4D), 1, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D), 1,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70), 1, 6, 0, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68), 2, 13, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B), 1, 8, None, NamedColors.yellow.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51), 2, 12, 0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71),", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49), 2, 9, None, NamedColors.red.value,", "2, 8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56), 2, 9,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F), 1, 7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55), 1, 5, 8, None,", "1, 2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D), 1, 3,", "0x41), 2, 7, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61), 2,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C), 2, 3, 24, None,", "# # Redistribution and use in source and binary forms,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C), 2, 9, None, NamedColors.magenta.value, None,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59), 1, 3, 16, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78), 2, 8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "0x6D), 2, 2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63), 1, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A), 1, 9, 20, None, None, None)", "1, 8, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41), 1, 9,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46), 1, 3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "16, None, None, None) def test_scc_pac_indent_16_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59), 1, 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A), 2, 12, 20, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71), 2, 2, 0, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57), 2, 11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; #", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48), 2, 5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68),", "1, 1, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C), 1, 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77), 1, 13, 12, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46), 1, 5, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66),", "0x68), 2, 2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48), 
2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C), 1, 8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C),", "0x54), 2, 5, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74), 2,", "0x61), 1, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41), 1,", "1, 10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D), 1, 11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54), 1, 14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74),", "14, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78), 2, 15, 16,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B), 1, 10, None, NamedColors.yellow.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79), 2, 4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F), 2, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59), 2, 11, 16, None, None, TextDecorationType(underline=True))", "11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48), 1, 12, None,", "2, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D), 2, 2,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55), 1, 14, 8, None, None, TextDecorationType(underline=True))", "0x56), 2, 9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76), 2,", "0x45), 1, 11, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F), 2, 13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "0x40), 1, 3, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72), 1, 4, 4, None, None, None)", "2, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D), 2, 13,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40), 2, 9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "2, 10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B), 2, 11,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59), 1, 7, 16, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E), 2, 4, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "FontStyleType class SCCPreambleAddressCodesTest(unittest.TestCase): def test_scc_pac_values(self): channel_1_byte_1 = [0x11, 0x12, 0x15,", "0x67), 2, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47), 2,", "None, NamedColors.red.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49), 1, 12, None, NamedColors.red.value,", "0x7D), 1, 6, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73), 1, 6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E), 1, 9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "0x5A), 2, 9, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A), 2,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B), 2, 3, None, NamedColors.yellow.value, None,", "2, 9, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C), 2, 10,", "2, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B), 2, 3, 20,", "% 0x08 == 0: # row 11 case self.assertIsNone(pac) else:", "2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41), 2, 3, None,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74), 2, 15, 8, None,", "0x77), 1, 13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57), 1,", "14, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E), 2, 15, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45), 1, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65),", "None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A), 1, 3, None, NamedColors.yellow.value, None, None)", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A), 1, 13, None, NamedColors.yellow.value, None,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71), 2, 2, 0, None,", "1, 11, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A), 1, 12,", "0x7A), 2, 2, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A), 2,", "0x4F), 2, 11, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F), 2,", "12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67), 2, 13, None,", "2, 10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53), 2, 11,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67), 2, 15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "5, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D), 1, 6, 24,", "0x50), 2, 11, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50), 2,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48), 2, 3, None, NamedColors.red.value,", "0x45), 2, 9, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65), 2,", "retain the above copyright notice, this # list of conditions", "2, 5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F), 2, 6,", "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED", "0x50), 2, 5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x70), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C), 2, 2, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "1, 12, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69), 1, 13,", "9, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65), 1, 10, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59), 1, 7, 16, None, None, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77), 1, 10, 12, None, None,", "0x6C), 1, 15, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C), 2,", "disclaimer in the documentation # and/or other materials provided with", "0x54), 2, 7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74), 2,", "0x79), 1, 4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59), 1,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50), 1, 12, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71), 2, 13, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51),", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71), 1, 4, 0, None,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73), 2, 13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "1, 8, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51), 1, 9,", "0x70), 1, 15, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76), 2, 6, 12, None, None, None)", "2, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45), 2, 3,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59), 2, 1, 16, None, None,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B), 2, 11, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69), 2, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55), 2, 9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61), 2, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43), 2, 5, None, NamedColors.green.value, None,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69), 2, 13, None, NamedColors.red.value,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D), 1, 11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68), 1, 15, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63), 2, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43),", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) def test_scc_pac_red(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48), 1, 1, None,", "10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54), 2, 11, 8,", "2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47), 2, 3, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52), 2, 11, 4, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B), 2, 11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A), 1, 12, 20, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C), 1, 8, 24, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A), 1, 5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "2, 5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E), 2, 6,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B), 1, 9, 20, None,", "None, NamedColors.yellow.value, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A), 1, 13, None, NamedColors.yellow.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79), 2, 13, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F), 2, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "0x62), 1, 10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54), 1, 11, 8, None, None,", "[0x11, 0x12, 0x15, 0x16, 0x17, 0x10, 0x13, 0x14] channel_2_byte_1 =", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63), 2, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D), 1, 7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71), 2, 2, 0, None, None,", "14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63), 1, 15, None,", "2, 10, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57), 2, 11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46), 2, 11, None, NamedColors.cyan.value, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49), 2, 3, None, NamedColors.red.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F), 2, 11, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43), 2, 1, None, NamedColors.green.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F), 2, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73), 2, 6, 4, None, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76), 2, 2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "2, 15, 0, None, None, None) def test_scc_pac_indent_0_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51),", "7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A), 2, 8, 20,", "5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C), 1, 6, None,", "0x7D), 1, 4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D), 1,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42), 2, 3, None, NamedColors.green.value,", "1, 13, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5C), 1, 14,", "15, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56), 2, 1, 12,", "None, NamedColors.magenta.value, 
None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D), 2, 1, None, NamedColors.magenta.value,", "0x47), 2, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47), 2,", "10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D), 2, 11, 24,", "0x5B), 2, 9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4B), 1, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42), 2, 12, None,", "2, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F), 2, 5,", "2, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B), 2, 10,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75), 2, 4, 8, None, None,", "binary form must reproduce the above copyright notice, # this", "2, 10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52), 2, 11,", "0x43), 2, 5, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63), 2,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C), 2, 15, None, NamedColors.magenta.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 
0x5F), 2, 1, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43), 1, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43),", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63), 2, 10, None, NamedColors.green.value, None,", "None, TextDecorationType(underline=True)) def test_scc_pac_magenta(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C), 1, 1, None, NamedColors.magenta.value,", "1, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67), 1, 4,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63), 1, 2, None, NamedColors.green.value,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E), 2, 6, None, NamedColors.white.value,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B), 2, 8, 20, None,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49), 1, 11, None, NamedColors.red.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58), 2, 7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78),", "2, 8, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41), 2, 9,", "pac.get_row()) self.assertEqual(indent, pac.get_indent()) self.assertEqual(color, pac.get_color()) self.assertEqual(font_style, pac.get_font_style()) 
self.assertEqual(text_decoration, pac.get_text_decoration()) def", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67), 2, 4, None, NamedColors.cyan.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73), 1, 13, 4, None, None,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46), 2, 12, None, NamedColors.cyan.value, None,", "1, 15, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5F), 2, 1,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40), 2, 3, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61), 1, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41),", "2, 5, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66), 2, 6,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D), 2, 10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A), 2, 15, 20, None, None, None) def", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77), 2, 8, 12, None, None,", "6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50), 2, 7, 0,", "for b2 in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in other_bytes_1:", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66), 1, 6, None, NamedColors.cyan.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "0x5B), 1, 3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B), 1,", "item not in channel_1_byte_1 and item not in channel_2_byte_1] other_bytes_2", "5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64), 2, 6, None,", "7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70), 1, 8, 0,", "8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B), 2, 9, None,", "0xFF): self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) def check_scc_pac_attributes(self, pac, channel, row, indent, color,", "test_scc_pac_yellow(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A), 1, 1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B), 2, 3, 20, None, None,", "2, 7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66), 2, 8,", "0x5F), 1, 12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F), 1,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E), 1, 10, None, NamedColors.white.value, FontStyleType.italic,", "4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53), 2, 5, 4,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77), 1, 10, 12, None, None, TextDecorationType(underline=True))", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63), 2, 8, None, NamedColors.green.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69), 1, 4, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B), 2, 12, 20, None, None,", "15, 20, None, None, None) def test_scc_pac_indent_20_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B), 1,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45), 1, 3, None, NamedColors.blue.value,", "6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53), 1, 7, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79), 1, 10, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45), 2, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E), 1, 15, 28, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52), 1, 3, 4, None, None, None)", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50), 1, 9, 0, None,", "test_scc_pac_indent_20_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B), 1, 1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48), 1, 12, 
None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D), 1, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "11, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56), 2, 12, 12,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65), 1, 15, None, NamedColors.blue.value, None,", "2, 3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66), 2, 4,", "def test_scc_pac_yellow_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B), 1, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "def test_scc_pac_indent_8_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55), 1, 1, 8, None, None, TextDecorationType(underline=True))", "4, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55), 2, 5, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79), 1, 6, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62), 1, 10, None, NamedColors.green.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74), 1, 15, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B), 1, 3, 20, None, None, TextDecorationType(underline=True))", "2, 14, 8, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75), 2, 15,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70), 2, 13, 0, None, None, None)", "0x40), 1, 9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60), 1,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D), 1, 8, 24, None,", "2, 10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F), 2, 11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70), 1, 15, 0, None, None, None)", "13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5A), 2, 14, 20,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F), 1, 13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x44), 2, 14, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64), 2,", "0x5E), 1, 12, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7E), 1,", "0x5F), 2, 9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64), 2, 15, None, NamedColors.blue.value, None, None) def", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67), 1, 15, None, NamedColors.cyan.value, None,", "0x4C), 1, 3, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C), 1,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61), 2, 15, None, 
NamedColors.white.value,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40), 2, 5, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7A), 2, 2, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E), 1, 2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64), 1, 13, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51), 1, 9, 0, None,", "0x41), 1, 3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65), 2, 10, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E), 2, 4, 28, None,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46), 2, 14, None, NamedColors.cyan.value,", "1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60), 2, 2, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C), 2, 1, 24, None, None,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D), 2, 6, None, NamedColors.magenta.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67), 
1, 13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53), 1, 12, 4, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53), 1, 3, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B), 1, 2, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49), 2, 1, None, NamedColors.red.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C), 1, 14, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C),", "0x7A), 2, 15, 20, None, None, None) def test_scc_pac_indent_20_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x79), 2, 8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59), 2,", "14, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61), 2, 15, None,", "None, None, None) def test_scc_pac_indent_28_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F), 1, 1, 28,", "15, 16, None, None, None) def test_scc_pac_indent_16_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59), 1,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C), 1, 6, 24, None,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F), 2, 9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F),", "2, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67), 2, 13,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76), 1, 8, 12, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65), 1, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51), 2, 9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A), 1, 4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "1, 3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76), 1, 4,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D), 2, 15, 24, None, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42), 1, 7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "0x64), 1, 2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44), 1,", "0x6D), 1, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D), 1,", "0x52), 2, 11, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52), 2,", "None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40), 1, 5, None, NamedColors.white.value, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C), 1, 15, 24, None, None,", "0x42), 2, 12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42), 2, 1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D), 1, 6, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45), 2, 1, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44), 1, 3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C), 2, 6, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C),", "1, 9, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C), 1, 10,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C), 2, 4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "0x69), 1, 4, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59), 1, 14, 16, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45), 2, 11, None, NamedColors.blue.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "and/or other materials provided with the distribution. # # THIS", "0x7A), 1, 6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E), 2, 10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56), 1, 11, 12,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F), 1, 15, None, NamedColors.white.value, FontStyleType.italic,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B), 2, 11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "2, 5, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B), 2, 6,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59), 2, 12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B), 2, 4, 20, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B), 1, 11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57), 1, 12, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77),", "1, 9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62), 1, 10,", "NamedColors.red.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68), 2, 13, None, NamedColors.red.value, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48), 1, 5, None, NamedColors.red.value, None,", "1, 7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68), 1, 8,", "8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B), 2, 9, 20,", "1, 5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D), 1, 6,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76), 2, 15, 12, None,", "13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D), 2, 14, 24,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45), 1, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C), 2, 9, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58), 1, 9, 16, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74), 1, 2, 8, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51), 2, 14, 0, None, None,", "0x65), 2, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) def test_scc_pac_cyan(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "1, 15, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55), 2, 1,", "15, 28, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5F), 2, 1, 28,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41), 2, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "# Redistribution and use in source and binary forms, with", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42), 2, 14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44), 1, 9, None, NamedColors.blue.value, None, None)", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69), 2, 15, None, NamedColors.red.value, None,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46), 2, 3, None, NamedColors.cyan.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77), 2, 13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57),", "1, 3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68), 1, 4,", "0x7D), 2, 10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D), 2,", "BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64), 2, 8, None, NamedColors.blue.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76), 1, 6, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B), 1, 12, 20, None, None, 
TextDecorationType(underline=True))", "provided with the distribution. # # THIS SOFTWARE IS PROVIDED", "3, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63), 1, 4, None,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D), 1, 9, None, NamedColors.magenta.value,", "None, None) def test_scc_pac_blue_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45), 1, 1, None, NamedColors.blue.value,", "2, 3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71), 2, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61), 2, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) def test_scc_pac_green(self):", "2, 15, 8, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_12(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56),", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61), 2, 10, None, NamedColors.white.value,", "11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44), 1, 12, None,", "11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B), 2, 12, 20,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76), 1, 6, 12, None,", "0x63), 1, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43), 1,", "0x18, 0x1B, 0x1C] all_range = list(range(0x00, 0XFF)) byte_2_range = range(0x40,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x61), 
1, 10, None, NamedColors.white.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64), 1, 15, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "0x77), 1, 6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57), 1,", "1, 14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72), 1, 15,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x70), 2, 6, 0, None, None,", "10, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58), 1, 11, 16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B), 1, 5, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B), 2, 13, 20, None, None,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48), 1, 7, None, NamedColors.red.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D), 1, 3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78), 1, 10, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54), 2, 3, 8, None, None,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B), 1, 3, None, NamedColors.yellow.value,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E), 1, 6, None, NamedColors.white.value, FontStyleType.italic, None)", 
"None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E), 1, 2, None, NamedColors.white.value,", "import SccPreambleAddressCode from ttconv.style_properties import TextDecorationType, NamedColors, FontStyleType class SCCPreambleAddressCodesTest(unittest.TestCase):", "8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E), 2, 9, None,", "0x4D), 2, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D), 2,", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44), 2, 14, None, NamedColors.blue.value, None,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64), 2, 6, None, NamedColors.blue.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5A), 2, 14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A),", "> 0x5F and b1 % 0x08 == 0: # row", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45), 1, 11, None, NamedColors.blue.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40), 2, 3, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "0x4C), 1, 12, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C), 1,", "2, 14, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E), 2, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A), 2, 7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B), 1, 9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58), 2, 1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B), 2, 5, 20, None, None,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E), 2, 2, None, NamedColors.white.value, FontStyleType.italic, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4B), 1, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B),", "0x65), 1, 10, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45), 1,", "5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78), 1, 6, 16,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70), 1, 13, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59), 2, 12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F), 1, 10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67), 2, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40), 1, 3, None, NamedColors.white.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47), 2, 14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "0x61), 1, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41), 1,", "0x7F), 2, 6, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F), 2,", "1, 4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40), 1, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72), 1, 6, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75), 2, 15, 8, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_12(self):", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60), 1, 6, None, NamedColors.white.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B), 1, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "0x52), 1, 3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72), 1,", "1, 8, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46), 1, 9,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69), 2, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49),", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E), 2, 15, None, NamedColors.white.value, FontStyleType.italic,", "0x7D), 2, 15, 24, None, None, 
TextDecorationType(underline=True)) def test_scc_pac_indent_28(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x51), 2, 5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78), 1, 8, 16, None, None,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F), 1, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52), 2, 14, 4, None, None,", "13, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48), 1, 14, None,", "0x6C), 1, 10, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A), 2, 3, 20, None, None, None)", "15, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58), 2, 1, 16,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72), 1, 10, 4, None,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53), 2, 7, 4, None,", "2, 6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42), 2, 7,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) def test_scc_pac_yellow(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A), 1, 1,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52), 2, 1, 4, None,", "2, 13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53), 2, 14,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54), 2, 5, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F), 1, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50), 1, 3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D), 2, 14, 24, None, None, TextDecorationType(underline=True))", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66), 1, 4, None, NamedColors.cyan.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67), 1, 4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C), 2, 5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71), 2, 6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x53), 1, 7, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73), 1,", "1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D), 1, 2, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B), 2, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "0x4F), 1, 5, None, 
NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79), 1, 10, 16, None, None, TextDecorationType(underline=True))", "0x52), 2, 5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72), 2,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D), 1, 13, None, NamedColors.magenta.value, None,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F), 2, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "2, 11, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F), 2, 12,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75), 2, 10, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74), 2, 15, 8, None, None, None) def test_scc_pac_indent_8_underline(self):", "TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E), 1, 9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52), 1, 12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B), 1, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B),", "0x70), 1, 13, 0, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50), 1,", "0x4E), 2, 12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C), 1, 6, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79), 1, 2, 16, None, None,", "1, 6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58), 1, 7,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47), 1, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52), 1, 9, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "0x51), 1, 1, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71), 1,", "LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN", "5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69), 1, 6, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41), 2, 11, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43), 2, 5, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63),", "0x6C), 1, 6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C), 1,", "9, 20, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A), 1, 10, 20,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48), 2, 14, None, NamedColors.red.value,", "0x48), 2, 5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68), 2,", "test_scc_pac_indent_4(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52), 1, 1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D), 1, 9, 24, None,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54), 2, 3, 8, None,", "3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B), 2, 4, 20,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48), 1, 12, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x73), 2, 8, 4, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A), 1, 12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A),", "0x61), 2, 8, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77), 1, 10, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57),", "10, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57), 1, 11, 12,", "1, 2, 12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57), 1, 3,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57), 2, 9, 12, None, None, TextDecorationType(underline=True))", "11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57), 2, 12, 12,", "TextDecorationType, NamedColors, FontStyleType class SCCPreambleAddressCodesTest(unittest.TestCase): def test_scc_pac_values(self): channel_1_byte_1 = [0x11,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65), 2, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS", "0x45), 1, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78), 1, 15, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42), 2, 7, None,", "15, 28, None, None, TextDecorationType(underline=True)) if __name__ == '__main__': unittest.main()", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A), 1, 2, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A), 2, 11, None, NamedColors.yellow.value, None, None)", "0x7D), 2, 8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D), 2,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F), 1, 7, 28, None,", "None, NamedColors.magenta.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D), 2, 12, None, NamedColors.magenta.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58), 2, 1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F), 2, 8, 28, None, None,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77), 2, 15, 12, None,", "7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62), 1, 8, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48), 2, 7, None, NamedColors.red.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A), 2, 1, None, NamedColors.yellow.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43), 1, 3, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79), 2, 13, 16, None, None,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B), 2, 14, None, NamedColors.yellow.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42), 2, 7, None, NamedColors.green.value, None, None)", "0x78), 2, 13, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43), 2, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43),", "None, NamedColors.white.value, None, 
TextDecorationType(underline=True)) def test_scc_pac_green(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42), 1, 1,", "1, 2, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4C), 1, 3,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75), 1, 8, 8, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C), 2, 5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C),", "1, 8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D), 1, 9,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67), 1, 4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "2, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B), 1, 3, 20,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x66), 1, 8, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B), 2, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75), 2, 4, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55),", "2, 10, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A), 2, 11,", "0x7F), 1, 13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F), 1,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70), 2, 13, 0, None,", "0x75), 
1, 8, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55), 1,", "1, 15, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58), 2, 1,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59), 2, 3, 16, None,", "1, 1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A), 1, 2,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77), 2, 6, 12, None,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B), 1, 12, None, NamedColors.yellow.value, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67), 2, 6, None, NamedColors.cyan.value, None,", "1, 5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70), 1, 6,", "13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46), 2, 14, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72), 2, 2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60), 2, 15, None, NamedColors.white.value, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F), 2, 8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A), 2, 2, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C), 1, 6, 24, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6A), 1, 10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F), 1, 5, 28, None, None,", "1, 8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53), 1, 9,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E), 1, 2, None, NamedColors.white.value, FontStyleType.italic, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73), 1, 2, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60), 2, 10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42), 1, 5, None, NamedColors.green.value, None, None)", "0x45), 1, 1, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65), 1,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69), 1, 8, None, NamedColors.red.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54), 2, 1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74),", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64), 2, 10, None, NamedColors.blue.value, None,", "13, 24, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5C), 1, 14, 24,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74), 1, 6, 8, None, None, None)", "1, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77), 2, 2, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50), 2, 12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70),", "1, 8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E), 1, 9,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64), 1, 10, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44),", "0x7C), 2, 2, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C), 2,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F), 2, 12, 28, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F), 1, 9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64), 2, 8, None, NamedColors.blue.value, None, None)", "1, 5, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75), 1, 6,", "None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_12(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56), 1, 1, 12,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71), 1, 6, 0, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70), 1, 15, 0, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "0x7B), 2, 13, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B), 2,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54), 2, 1, 8, None,", "copyright notice, this # list of conditions and the following", "2, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45), 2, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E), 1, 12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E),", "9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62), 1, 10, None,", "1, 13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52), 1, 14,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x51), 2, 7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "1, 14, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60), 1, 15,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E), 1, 3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E), 2, 6, 28, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47), 2, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54), 2, 12, 8, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74),", "11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55), 2, 12, 8,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51), 1, 12, 0, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D), 2, 13, 24, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F), 1, 12, 28, None, None, TextDecorationType(underline=True))", "11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58), 2, 12, 16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C), 1, 13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77), 1, 4, 12,", "0x4C), 1, 5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F), 2, 11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F),", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D), 1, 2, 24, None,", "0x68), 1, 15, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48), 2,", "0x63), 1, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F), 
2, 2, 28, None, None,", "INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY,", "0x4C), 2, 3, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75), 2, 6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67), 1, 2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "0x76), 2, 15, 12, None, None, None) def test_scc_pac_indent_12_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x6C), 2, 15, None, NamedColors.magenta.value, None, None) def test_scc_pac_magenta_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B), 2, 14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66), 2, 13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51), 1, 3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71),", "7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77), 2, 8, 12,", "2, 1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D), 2, 2,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53), 1, 9, 4, None,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51), 2, 5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46), 2, 14, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "1, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D), 1, 14,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71), 1, 10, 0, None, None,", "0x73), 2, 2, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75), 1, 13, 8, None, None,", "1, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61), 1, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43), 1, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63),", "1, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F), 1, 2, 28,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52), 2, 1, 4, None, None,", "0x5E), 1, 7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E), 1,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x44), 2, 5, None, NamedColors.blue.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53), 1, 5, 4, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4B), 1, 7, None,", "0x62), 1, 6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42), 1,", "0x5A), 1, 1, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A), 1,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56), 1, 9, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x70), 2, 6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50),", "4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43), 1, 5, None,", "14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F), 1, 15, None,", "= list(range(0x00, 0XFF)) byte_2_range = range(0x40, 0x80) other_bytes_1 = [item", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E), 1, 7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D), 2, 4, None, NamedColors.magenta.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F), 2, 2, 28, None, None, TextDecorationType(underline=True))", "14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74), 1, 15, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E), 2, 7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46), 2, 9, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66),", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48), 1, 3, None, NamedColors.red.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F), 1, 5, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43), 2, 3, None, NamedColors.green.value,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48), 2, 3, None, NamedColors.red.value, None,", "None) def test_scc_pac_indent_8_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55), 1, 1, 8, None, None,", "test_scc_pac_indent_0_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51), 1, 1, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40), 2, 5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57), 1, 12, 12, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41), 2, 3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61),", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61), 2, 13, None, NamedColors.white.value,", "3, 8, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75), 2, 4, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73), 1, 2, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C), 1, 6, None, NamedColors.magenta.value, None, None)", "2, 10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E), 2, 11,", "None, None, None) def test_scc_pac_indent_20_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B), 1, 1, 20,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B), 2, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "14, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C), 2, 15, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C), 1, 10, 24, None, None, None)", "0x52), 1, 14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49), 2, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "0x74), 2, 10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54), 2,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A), 2, 8, None, NamedColors.yellow.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A), 1, 6, 20, None, None, None)", "0x6A), 2, 2, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A), 2,", "13, None, NamedColors.white.value, 
FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F), 1, 14, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71), 2, 4, 0, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50), 2, 12, 0, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59), 2, 14, 16, None, None, TextDecorationType(underline=True))", "15, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44), 2, 1, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C), 1, 4, 24, None, None, None)", "5, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77), 2, 6, 12,", "None, None) def test_scc_pac_indent_24_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D), 1, 1, 24, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D), 1, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D),", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B), 1, 14, None, NamedColors.yellow.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D), 1, 8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51), 1, 9, 0, None, None, TextDecorationType(underline=True))", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50), 2, 12, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68), 
1, 2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48),", "2, 7, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x73), 2, 8,", "documentation # and/or other materials provided with the distribution. #", "2, 11, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56), 2, 12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55), 2, 9, 8, None, None, TextDecorationType(underline=True))", "2, 2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F), 2, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E), 2, 1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E),", "test_scc_pac_indent_24(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C), 1, 1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "8, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40), 1, 9, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E), 2, 5, 28, None, None, None)", "NamedColors.green.value, None, None) def test_scc_pac_green_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43), 1, 1, None,", "0x6D), 2, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D), 2,", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6D), 2, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) def test_scc_pac_white_italics(self):", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49), 1, 1, 
None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76), 2, 10, 12, None, None, None)", "0x4F), 2, 3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F), 2,", "0x54), 1, 1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74), 1,", "11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47), 1, 12, None,", "0x6C), 2, 6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C), 2,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66), 2, 15, None, NamedColors.cyan.value,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70), 2, 13, 0, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54), 1, 9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58), 2, 5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A), 2, 11, 20, None, None, None)", "1, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67), 1, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51), 2, 1, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71),", "1, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F), 1, 11,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70), 2, 15, 0, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42), 2, 11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54), 2, 9, 8, None, None, None)", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57), 2, 11, 12, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75), 1, 6, 8, None, None, TextDecorationType(underline=True))", "0x5E), 1, 5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E), 1,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E), 1, 12, 28, None,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56), 1, 3, 12, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A), 2, 8, 20, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F), 2, 12, 28, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F), 2, 15, 28, None, None, TextDecorationType(underline=True)) if __name__", "2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43), 2, 3, None,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B), 2, 7, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64), 1, 13, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44),", 
"None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7A), 2, 2, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A), 1, 14, None, NamedColors.yellow.value, None, None)", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76), 1, 15, 12, None,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E), 1, 13, None, NamedColors.white.value, FontStyleType.italic, None)", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66), 2, 4, None, NamedColors.cyan.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F), 1, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E), 1, 3, None, NamedColors.white.value, FontStyleType.italic,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F), 1, 13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48), 1, 14, None, NamedColors.red.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B), 2, 4, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B),", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43), 1, 7, None, NamedColors.green.value,", "1, 2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E), 1, 3,", "0x60), 2, 
2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40), 2,", "in the documentation # and/or other materials provided with the", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45), 1, 12, None, NamedColors.blue.value,", "8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45), 2, 9, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C), 2, 13, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C),", "1, 1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D), 1, 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B), 2, 14, 20, None, None,", "2, 11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E), 2, 12,", "4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56), 1, 5, 12,", "9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D), 1, 10, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65), 2, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48), 2, 14, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "10, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A), 1, 11, 20,", "0x4B), 2, 5, None, 
NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74), 2, 8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "1, 4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x50), 1, 5,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C), 2, 11, 24, None, None, None)", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64), 1, 2, None, NamedColors.blue.value,", "0x58), 1, 7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78), 1,", "0x62), 1, 8, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44), 1, 12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "2, 13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B), 2, 14,", "1, 10, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48), 1, 11,", "6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E), 1, 7, 28,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68), 2, 10, None, NamedColors.red.value, None,", "0x51), 1, 12, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58), 1, 12, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78),", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75), 1, 10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B), 1, 3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B),", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E), 2, 2, 28, None,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D), 2, 1, None, NamedColors.magenta.value, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A), 2, 9, 20, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57), 1, 5, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77),", "1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E), 2, 2, None,", "1, 1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C), 1, 2,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B), 1, 8, 20, None,", "1, 8, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57), 1, 9,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F), 1, 12, None, NamedColors.white.value,", "1, 14, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73), 1, 15,", "3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68), 1, 4, None,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D), 2, 11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62), 2, 15, None, NamedColors.green.value, None, None) def test_scc_pac_green_underline(self):", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B), 2, 9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B),", "2, 3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A), 2, 4,", "2, 14, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70), 2, 15,", "2, 3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72), 2, 4,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B), 2, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D), 1, 5, 24, None, None, TextDecorationType(underline=True))", "12, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D), 1, 13, 24,", "0x70), 2, 15, 0, None, None, None) def test_scc_pac_indent_0_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "2, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B), 2, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59), 2, 9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79),", "0x63), 1, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43), 1,", "2, 5, 12, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77), 2, 6,", "2, 9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D), 2, 10,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A), 1, 11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "2, 4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D), 2, 5,", "0x1E, 0x1F, 0x18, 0x1B, 0x1C] all_range = list(range(0x00, 0XFF)) byte_2_range", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61), 1, 13, None, NamedColors.white.value,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A), 1, 8, None, NamedColors.yellow.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45), 2, 12, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69), 1, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E), 1, 9, None, NamedColors.white.value, FontStyleType.italic,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58), 1, 12, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "2, 11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58), 2, 12,", "0x58), 1, 12, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78), 1,", "None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55), 2, 3, 8, None, None, TextDecorationType(underline=True))", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58), 1, 5, 16, None,", "None, None, None) def test_scc_pac_indent_16_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59), 1, 1, 16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C), 2, 15, None, NamedColors.magenta.value, None, None) def", "2, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53), 2, 3, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D), 1, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41), 1, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "1, 2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F), 1, 3,", "0x59), 1, 3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79), 1,", "2, 11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C), 2, 12,", "0x75), 2, 10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55), 2,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B), 2, 15, None, NamedColors.yellow.value, None,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54), 1, 9, 8, None,", "for item in all_range if item not in channel_1_byte_1 and", "None, 
NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F), 2, 10, None, NamedColors.white.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50), 2, 5, 0, None, None, None)", "2, 10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E), 2, 11,", "24, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_28(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E), 1, 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5C), 1, 14, 24, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x46), 1, 1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C), 2, 4, None, NamedColors.magenta.value, None, None)", "0x50), 1, 3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70), 1,", "1, 3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77), 1, 4,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49), 2, 14, None, NamedColors.red.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63), 2, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "1, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64), 1, 2, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42), 2, 12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76), 1, 
13, 12,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41), 1, 12, None, NamedColors.white.value, None,", "12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67), 1, 13, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A), 1, 15, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A),", "0x40), 2, 11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40), 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53), 2, 11, 4, None, None, TextDecorationType(underline=True))", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F), 1, 12, None, NamedColors.white.value, FontStyleType.italic,", "def test_scc_pac_indent_20_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B), 1, 1, 20, None, None, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C), 1, 6, 24, None, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67), 1, 10, None, NamedColors.cyan.value, None,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C), 2, 5, None, NamedColors.magenta.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D), 1, 12, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D),", "0x46), 2, 5, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66), 2,", "2, 2, 4, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53), 2, 3,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F), 2, 11, 28, None, None, TextDecorationType(underline=True))", "5, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79), 1, 6, 16,", "0x72), 2, 13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A), 2, 9, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78), 1, 13, 16, None, None, None)", "1, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B), 1, 9,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42), 2, 11, None, NamedColors.green.value, None,", "1, 12, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C), 1, 13,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45), 1, 5, None, NamedColors.blue.value,", "2, 10, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51), 2, 11,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58), 2, 12, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x59), 1, 12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C), 1, 10, None, 
NamedColors.magenta.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70), 1, 13, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50),", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B), 2, 13, 20, None,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42), 2, 9, None, NamedColors.green.value, None,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75), 1, 15, 8, None,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E), 2, 1, None, NamedColors.white.value, FontStyleType.italic,", "0x62), 2, 13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42), 2,", "1, 12, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68), 1, 13,", "1, 4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C), 1, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61), 2, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C), 1, 6, 24, None, None, None)", "13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57), 1, 14, 12,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44), 1, 9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78), 2, 4, 16, None,", "2, 2, None, NamedColors.yellow.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A), 2, 3,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52), 1, 7, 4, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E), 2, 1, 28, None, None, None)", "2, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42), 2, 3, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67), 1, 8, None, NamedColors.cyan.value, None,", "14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62), 2, 15, None,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D), 2, 11, None, NamedColors.magenta.value,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D), 2, 2, None, NamedColors.magenta.value, None,", "2, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46), 2, 3, None,", "7, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x73), 2, 8, 4,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55), 2, 5, 8, None,", "1, 6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51), 1, 7,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41), 2, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "3, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B), 2, 4, None,", "0x57), 1, 12, 12, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59), 2, 7, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "1, 10, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46), 1, 11,", "0x64), 2, 15, None, NamedColors.blue.value, None, None) def test_scc_pac_blue_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44), 1, 9, None,", "list of conditions and the following disclaimer in the documentation", "1, 7, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C), 1, 8,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x64), 2, 2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.assertIsNotNone(pac) for b2 in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61), 2, 8, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "2, 9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60), 2, 10,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52), 1, 11, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E), 1, 10, 28,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40), 2, 14, None, NamedColors.white.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "0x77), 1, 10, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75), 2, 2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53), 2, 1, 4, None, None, TextDecorationType(underline=True))", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44), 1, 12, None, NamedColors.blue.value, None,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B), 2, 12, None, NamedColors.yellow.value, None,", "2, 7, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79), 2, 8,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70), 2, 15, 0, None, None, None) def", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73), 2, 15, 4, None, None, TextDecorationType(underline=True))", "4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D), 2, 5, 24,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62), 2, 10, None, NamedColors.green.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63), 1, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F), 2, 13, 28,", "0x5C), 2, 12, 24, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C), 2,", "15, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E), 2, 1, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44), 1, 7, None, NamedColors.blue.value,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51), 2, 12, 0, None,", "0x55), 2, 11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A), 1, 4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A),", "8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53), 2, 9, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69), 1, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B), 2, 5, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "1, 9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68), 1, 10,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E), 2, 11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F), 2, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A), 2, 10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72), 1, 8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52),", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D), 1, 3, None, NamedColors.magenta.value,", "8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58), 2, 9, 16,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F), 1, 6, 28, None,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F), 1, 6, None, NamedColors.white.value, FontStyleType.italic,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70), 2, 8, 0, None,", "10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E), 1, 11, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B), 1, 10, 20, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77), 1, 13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74), 2, 10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53), 2, 1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A), 1, 12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67), 2, 8, None, NamedColors.cyan.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B), 2, 11, 20, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D), 1, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43), 2, 7, None, NamedColors.green.value,", "0x5D), 1, 7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D), 1,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D), 1, 13, None, NamedColors.magenta.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56), 2, 9, 12, None, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76), 2, 10, 12, None, None,", "0x5B), 1, 9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F), 1, 8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F),", "2, 11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40), 2, 12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 
0x73), 2, 8, 4, None, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72), 1, 6, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69), 2, 4, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E), 2, 9, None, NamedColors.white.value,", "1, 12, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75), 1, 13,", "1, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69), 1, 10,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46), 2, 12, None, NamedColors.cyan.value, None, None)", "for b2 in range(0x00, 0xFF): self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) def check_scc_pac_attributes(self, pac,", "2, 6, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C), 2, 7,", "0x57), 1, 9, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77), 1,", "1, 9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60), 1, 10,", "0x63), 1, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43), 1,", "0x46), 1, 9, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66), 1,", "0x52), 1, 12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66), 2, 2, None, NamedColors.cyan.value, None, 
None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "0x6B), 1, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B), 1,", "0x47), 2, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67), 2,", "0, None, None, None) def test_scc_pac_indent_0_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51), 1, 1,", "None) def test_scc_pac_indent_16_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59), 1, 1, 16, None, None,", "2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E), 1, 3, None,", "None, NamedColors.magenta.value, None, None) def test_scc_pac_magenta_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D), 1, 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x72), 1, 2, 4, None, None,", "15, None, NamedColors.green.value, None, None) def test_scc_pac_green_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C), 2, 15, 24, None, None,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60), 2, 4, None, NamedColors.white.value,", "0x48), 2, 3, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68), 2,", "4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x50), 1, 5, 0,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E), 2, 15, 28, None,", "1, 11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B), 1, 12,", 
"NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63), 1, 13, None, NamedColors.green.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58), 1, 3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x78),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7E), 1, 13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E),", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42), 2, 7, None, NamedColors.green.value, None,", "8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58), 1, 9, 16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A), 1, 1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42), 1, 5, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53), 2, 11, 4, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59), 2, 7, 16, None, None, TextDecorationType(underline=True))", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A), 2, 4, 20, None,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62), 1, 2, None, NamedColors.green.value, None,", "None, NamedColors.cyan.value, None, None) def test_scc_pac_cyan_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47), 1, 1,", "0x5D), 1, 9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D), 1,", "2, 10, None, 
NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43), 2, 11,", "2, 1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76), 2, 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C), 1, 13, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x41), 2, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61), 2,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69), 1, 13, None, NamedColors.red.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40), 1, 11, None, NamedColors.white.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C), 1, 15, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60), 1, 6, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C), 2, 14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61), 1, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49), 2, 5, None, NamedColors.red.value,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B), 1, 5, None, NamedColors.yellow.value,", "0x61), 1, 8, 
None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41), 1,", "0x60), 2, 10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40), 2,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67), 1, 13, None, NamedColors.cyan.value, None,", "DAMAGE. \"\"\"Unit tests for the SCC PACs\"\"\" # pylint: disable=R0201,C0115,C0116", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C), 2, 4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D), 2, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51), 2, 14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "0x5B), 1, 14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76), 1, 2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56),", "0x6E), 1, 4, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41), 1, 11, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "14, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C), 1, 15, None,", "None, NamedColors.green.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63), 1, 15, None, NamedColors.green.value,", "test_scc_pac_white(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40), 1, 1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61), 1, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D), 1, 11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "1, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63), 1, 8,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B), 1, 15, 20, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D), 2, 3, 24, None, None, TextDecorationType(underline=True))", "0x70), 2, 4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C), 1, 2, 24, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B), 1, 15, 20, None, None,", "0x40), 2, 3, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C), 1, 10, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52), 
1, 3, 4, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76), 1, 6, 12, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C), 1, 4, None, NamedColors.magenta.value, None, None)", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57), 1, 11, 12, None,", "7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71), 1, 8, 0,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x70), 2, 6, 0, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A), 1, 12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A),", "test_scc_pac_yellow_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B), 1, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C), 1, 11, 24, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D), 2, 8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61), 2, 2, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48), 1, 9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65), 1, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "1, 7, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69), 1, 8,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) def test_scc_pac_magenta(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C), 1, 1,", "0x74), 1, 4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B), 1, 11, 20, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F), 1, 15, 28, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77), 2, 4, 12, None, None, TextDecorationType(underline=True))", "9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B), 2, 10, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75), 1, 2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B), 1, 5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43), 2, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70), 1, 2, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72), 1, 10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56), 2, 14, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76),", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60), 1, 6, None, NamedColors.white.value, None,", "0x4B), 1, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B), 1,", "2, 1, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A), 2, 2,", "2, 15, None, NamedColors.green.value, None, None) def test_scc_pac_green_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43),", "1, 11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F), 1, 12,", "2, 1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79), 2, 2,", "0x74), 2, 13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x54), 2,", "0x68), 1, 10, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F), 2, 9, 28, None, None,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47), 1, 12, None, NamedColors.cyan.value,", "0x7A), 2, 4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A), 2,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B), 2, 1, None, NamedColors.yellow.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54), 1, 5, 8, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75), 2, 8, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43), 2, 5, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41), 2, 1, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C), 2, 4, 24, None, None, None)", "15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F), 2, 1, None,", "0x52), 2, 14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75), 2, 15, 8, None, None,", "1, 1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74), 1, 2,", "pylint: disable=R0201,C0115,C0116 import unittest from ttconv.scc.codes.preambles_address_codes import SccPreambleAddressCode from ttconv.style_properties", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E), 2, 2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62), 2, 6, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F), 1, 5, 28, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40), 1, 9, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "1, 6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55), 1, 7,", "0x53), 1, 5, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73), 1,", "0x68), 2, 4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46), 2, 5, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A), 1, 5, 20, None,", "0x49), 2, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69), 2,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66), 2, 2, None, NamedColors.cyan.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D), 1, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D),", "0x70), 1, 8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50), 1,", "0x61), 1, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B), 1, 10, 20, None, None,", "0x71), 1, 4, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51), 1,", 
"LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING", "b2 in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in channel_2_byte_1: for", "OWNER OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT,", "2, 10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B), 2, 11,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67), 2, 10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79), 2, 15, 16, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_20(self):", "2, 14, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73), 2, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62), 1, 8, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42),", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55), 2, 3, 8, None,", "None, NamedColors.yellow.value, None, None) def test_scc_pac_yellow_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B), 1, 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54), 2, 7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74),", "2, 10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56), 2, 11,", "OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49), 1, 5, None, NamedColors.red.value, None,", "1, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41), 1, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B), 2, 15, 20, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_24(self):", "in channel_2_byte_1] other_bytes_2 = [item for item in all_range if", "6, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52), 1, 7, 4,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58), 2, 5, 16, None,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A), 2, 6, 20, None,", "2, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49), 2, 12,", "8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A), 1, 9, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A), 2, 11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57), 2, 1, 12, None, None, TextDecorationType(underline=True))", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46), 1, 7, None, NamedColors.cyan.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B), 1, 6, 20, None, None, TextDecorationType(underline=True))", "7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x65), 2, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x66), 1, 8, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46),", "None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73), 2, 13, 4, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49), 1, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C), 2, 8, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77), 1, 13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62), 1, 6, None, NamedColors.green.value,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43), 2, 3, None, NamedColors.green.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64), 1, 6, None, NamedColors.blue.value, None, None)", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66), 2, 15, None, NamedColors.cyan.value, None,", "0x7E), 1, 10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E), 1,", "0x6F), 1, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F), 1,", "12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64), 2, 13, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A), 2, 10, 20, None, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A), 1, 14, 20, None, None,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45), 1, 9, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "0x55), 1, 7, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75), 1,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F), 1, 2, None, NamedColors.white.value,", "1, 4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B), 1, 5,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F), 1, 4, 28, None, None, TextDecorationType(underline=True))", "8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49), 2, 9, None,", "5, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B), 2, 6, 20,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B), 1, 11, None, NamedColors.yellow.value, None,", "2, 9, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C), 2, 10,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55), 2, 14, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "6, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D), 2, 7, 24,", "0x48), 1, 1, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68), 1,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42), 1, 3, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61), 2, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78), 1, 6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58),", "10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C), 1, 11, 24,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57), 1, 9, 12, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66), 2, 4, None, NamedColors.cyan.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54), 1, 7, 8, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79), 2, 8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59),", "1, 12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76), 1, 13,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65), 1, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "6, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C), 1, 7, 24,", "11, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E), 2, 12, 28,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54), 2, 12, 8, None, None, None)", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47), 2, 1, None, 
NamedColors.cyan.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49), 1, 3, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "2, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B), 2, 2,", "1, 9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B), 1, 10,", "2, 2, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C), 2, 3,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40), 2, 11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40),", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58), 2, 1, 16, None,", "2, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43), 2, 5,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D), 1, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "0x5E), 2, 3, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E), 2,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63), 1, 10, None, NamedColors.green.value, None,", "None) def test_scc_pac_indent_20_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B), 1, 1, 20, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F), 2, 3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F),", "None, 
NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68), 2, 4, None, NamedColors.red.value,", "3, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73), 1, 4, 4,", "TextDecorationType(underline=True)) def test_scc_pac_indent_12(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56), 1, 1, 12, None, None,", "0x47), 1, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67), 1,", "13, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51), 1, 14, 0,", "2, 2, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51), 2, 3,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49), 1, 9, None, NamedColors.red.value, None,", "0x78), 2, 10, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41), 1, 7, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67), 1, 13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "2, 12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A), 2, 13,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D), 2, 12, 24, None, None, TextDecorationType(underline=True))", "0x48), 1, 9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68), 1,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63), 1, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E), 1, 14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B), 2, 8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A), 1, 6, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56), 1, 9, 12, None, None,", "2, 4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42), 2, 5,", "2, 12, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78), 2, 13,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70), 1, 15, 0, None, None,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F), 2, 2, None, NamedColors.white.value, FontStyleType.italic,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74), 1, 6, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x44), 2, 5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C), 2, 7, 24, None, None,", "None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51), 1, 7, 0, None, None,", "2, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49), 2, 9,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A), 2, 15, None, NamedColors.yellow.value, None,", "0x5F), 1, 7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56), 1, 1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76),", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77), 2, 2, 12, None,", "1, 10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54), 1, 11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76), 2, 13, 12, None, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52), 2, 3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72), 1, 10, 4, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53), 2, 5, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, NamedColors.white.value, FontStyleType.italic, None) def test_scc_pac_white_italics_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F), 1, 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76), 2, 2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60), 1, 8, None, NamedColors.white.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40),", "0x7B), 2, 2, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B), 2,", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED #", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F), 2, 13, None, NamedColors.white.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C), 1, 2, 24, None, None, None)", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59), 2, 12, 16, None,", "14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D), 1, 15, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E), 2, 4, 28, None, None,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75), 2, 2, 8, None,", "0x51), 2, 1, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67), 2, 8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74), 2, 10, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B), 2, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) def", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B), 2, 5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54), 2, 9, 8, None, None,", "2, 11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53), 2, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78), 1, 6, 16, None, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55), 2, 12, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42), 2, 9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62),", "2, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F), 2, 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52), 2, 12, 4, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68), 2, 8, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53), 2, 11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62), 2, 10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78), 1, 2, 16, None, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41), 1, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "1, 13, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59), 1, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70), 1, 15, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50),", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4C), 1, 3, None, NamedColors.magenta.value,", "5, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74), 2, 6, 8,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E), 1, 10, None, NamedColors.white.value,", "9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A), 2, 10, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E), 2, 2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F), 2, 12, 28, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41), 2, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61),", "1, 4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46), 1, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52), 2, 9, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72),", "0x6E), 1, 2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B), 2, 9, 20, None, None, 
TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57), 1, 9, 12, None, None, TextDecorationType(underline=True))", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E), 2, 5, None, NamedColors.white.value, FontStyleType.italic, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E), 1, 7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73), 1, 15, 4, None, None, TextDecorationType(underline=True))", "0x74), 2, 15, 8, None, None, None) def test_scc_pac_indent_8_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, TextDecorationType(underline=True)) def test_scc_pac_blue(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44), 1, 1, None, NamedColors.blue.value,", "1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C), 1, 2, 24,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66), 1, 13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68), 1, 13, None, NamedColors.red.value, None, None)", "1, 5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E), 1, 6,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A), 1, 7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65), 1, 13, None, NamedColors.blue.value, None,", 
"TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49), 1, 7, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54), 2, 1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "0x65), 1, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45), 1,", "2, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69), 2, 6,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69), 1, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B), 2, 5, 20, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D), 1, 10, 24, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40), 1, 1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60),", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60), 2, 2, None, NamedColors.white.value,", "2, 7, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C), 2, 8,", "15, 0, None, None, None) def test_scc_pac_indent_0_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63), 2, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50), 2, 14, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F), 2, 12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F),", "2, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D), 2, 10,", "0x50), 1, 5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60), 2, 4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58), 1, 12, 16, None, None,", "7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77), 1, 8, 12,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62), 1, 10, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72), 1, 10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52),", "1, 7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74), 1, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F), 1, 12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F), 1, 5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None, None) 
def test_scc_pac_indent_4_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53), 1, 1, 4,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52), 1, 12, 4, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77), 1, 15, 12, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B), 1, 5, 20, None, None,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F), 1, 9, 28, None,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F), 2, 5, None, NamedColors.white.value,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72), 1, 13, 4, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67), 1, 2, None, NamedColors.cyan.value, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74), 2, 15, 8, None, None,", "0x62), 2, 4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42), 2,", "self.assertEqual(text_decoration, pac.get_text_decoration()) def test_scc_pac_white(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40), 1, 1, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A), 1, 5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A),", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43), 1, 9, None, NamedColors.green.value,", "5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E), 2, 6, 28,", 
"None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52), 2, 1, 4, None, None, None)", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E), 1, 10, 28, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75), 2, 13, 8, None, None,", "0x6F), 2, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F), 2,", "0x40), 1, 7, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53), 2, 12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73),", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49), 2, 7, None, NamedColors.red.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6D), 2, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) def", "4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C), 2, 5, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59), 2, 11, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59),", "None, TextDecorationType(underline=True)) def test_scc_pac_cyan(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x46), 1, 1, None, NamedColors.cyan.value,", "0x72), 1, 4, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52), 1,", "9, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B), 2, 10, 20,", "NamedColors.red.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69), 2, 13, None, NamedColors.red.value, None,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7A), 2, 2, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65), 1, 2, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45),", "9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64), 2, 10, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x45), 2, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65),", "2, 2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40), 2, 3,", "0x5C), 1, 11, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C), 1,", "None, TextDecorationType(underline=True)) def test_scc_pac_green(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42), 1, 1, None, NamedColors.green.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40), 1, 7, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53), 1, 5, 4, None, None, TextDecorationType(underline=True))", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47), 1, 5, None, NamedColors.cyan.value,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61), 1, 2, None, NamedColors.white.value, None,", "0x5C), 1, 9, 24, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C), 1,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74), 1, 8, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58), 2, 3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E), 1, 8, 28, None, None,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C), 2, 13, None, NamedColors.magenta.value,", "\"AS IS\" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C), 1, 1, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C),", "0x53), 2, 12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73), 2,", "0x76), 1, 8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56), 1,", "1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E), 1, 2, 28,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E), 2, 11, 28, None, None,", "0x72), 2, 8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52), 2,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F), 1, 3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "9, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77), 1, 10, 12,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B), 2, 10, None, NamedColors.yellow.value,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41), 2, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "2, 6, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46), 2, 7,", "0x54), 1, 5, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54), 2, 3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A), 1, 9, 20, None, None,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C), 1, 5, None, NamedColors.magenta.value,", "def test_scc_pac_cyan(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x46), 1, 1, None, NamedColors.cyan.value, None, None)", "15, None, NamedColors.white.value, None, None) def test_scc_pac_white_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41), 1,", "12, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69), 1, 13, None,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E), 2, 7, None, NamedColors.white.value, FontStyleType.italic, None)", "0x59), 1, 9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D), 1, 13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49), 2, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49),", "def test_scc_pac_red_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49), 1, 1, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F), 2, 11, 28, None,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62), 1, 8, None, NamedColors.green.value,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C), 2, 5, None, NamedColors.magenta.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44), 1, 12, None, NamedColors.blue.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76), 2, 8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56),", "0x6A), 2, 10, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A), 1, 14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A),", "2, 6, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D), 2, 7,", "10, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58), 2, 11, 16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4C), 1, 3, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61), 1, 2, None, NamedColors.white.value,", "0x7C), 1, 13, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 
0x5C), 1,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F), 1, 9, None, NamedColors.white.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45), 1, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6A), 1, 4, None, NamedColors.yellow.value, None, None)", "1, 11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A), 1, 12,", "None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_8(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54), 1, 1, 8,", "1, 1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62), 1, 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64), 2, 15, None, NamedColors.blue.value, None, None)", "2, 3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B), 2, 4,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52), 2, 3, 4, None, None, None)", "1, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D), 1, 2,", "0x7A), 1, 2, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A), 1,", "7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D), 2, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D), 2, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D),", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E), 1, 14, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56), 2, 5, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "0x5D), 2, 11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D), 2,", "0x60), 1, 2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40), 1,", "0x53), 1, 11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55), 1, 3, 8, None, None,", "14, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79), 1, 15, 16,", "2, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54), 2, 3, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D), 2, 14, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60), 1, 10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40),", "6, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56), 1, 7, 12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B), 2, 3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44), 1, 5, None, NamedColors.blue.value, None,", "2, 24, None, None, 
None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C), 2, 3, 24,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A), 1, 13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x64), 2, 2, None, NamedColors.blue.value, None,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46), 2, 5, None, NamedColors.cyan.value, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64), 1, 8, None, NamedColors.blue.value,", "0x51), 1, 7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71), 1,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45), 2, 5, None, NamedColors.blue.value,", "1, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43), 1, 9,", "2, 10, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59), 2, 11,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71), 1, 4, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54), 2, 5, 8, None, None, None)", "9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F), 2, 10, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59), 1, 9, 16, None, None,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51), 2, 3, 0, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77), 2, 2, 12, None, None, TextDecorationType(underline=True))", "2, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65), 2, 4,", "0x4C), 2, 1, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C), 2,", "4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A), 1, 5, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70), 2, 10, 0, None, None, None)", "5, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F), 1, 6, 28,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C), 1, 10, None, NamedColors.magenta.value,", "15, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54), 2, 1, 8,", "0x78), 1, 2, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58), 1,", "15, 0, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_4(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A), 2, 14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A),", "10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52), 1, 11, 4,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C), 2, 6, None, NamedColors.magenta.value, None,", "None, NamedColors.white.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40), 2, 9, None, NamedColors.white.value,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74), 2, 8, 8, None,", "0x4B), 2, 12, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71), 1, 10, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "0x71), 1, 10, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51), 1,", "13, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55), 1, 14, 8,", "2, 14, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74), 2, 15,", "1, 1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60), 1, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79), 1, 13, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59),", "0x57), 2, 11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57), 2,", "0x5D), 2, 9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D), 2,", "1, 15, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51), 2, 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55), 2, 1, 8, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45), 1, 9, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B), 2, 11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E), 1, 15, 28, None,", "13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A), 1, 14, 20,", "15, 24, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_28(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E), 1,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78), 1, 2, 16, None,", "1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74), 1, 2, 8,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70), 2, 15, 0, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B), 2, 2, 20, None, None, TextDecorationType(underline=True))", "2, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63), 2, 15,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E), 2, 13, 28, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54), 1, 1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74),", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56), 2, 14, 12, None,", "1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63), 1, 2, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57), 2, 12, 12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77),", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F), 2, 12, None, NamedColors.white.value, FontStyleType.italic,", "8, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51), 1, 9, 0,", "5, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75), 1, 6, 8,", "0x5C), 1, 7, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53), 2, 12, 4, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58), 1, 3, 16, None, None,", "15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B), 2, 1, None,", "6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57), 1, 7, 12,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48), 2, 5, None, NamedColors.red.value, None,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77), 1, 15, 12, None,", "15, 8, None, None, None) def test_scc_pac_indent_8_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62), 2, 4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "0x6C), 1, 13, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C), 1,", "7, 16, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79), 1, 8, 16,", "0x5F), 2, 14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F), 2,", "0x66), 1, 2, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46), 1,", "0x55), 1, 5, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69), 1, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43), 2, 9, None, NamedColors.green.value,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62), 2, 10, None, NamedColors.green.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C), 1, 15, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72), 1, 15, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64), 2, 6, None, NamedColors.blue.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F), 2, 15, 28, None, None, TextDecorationType(underline=True))", "9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E), 2, 10, 28,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75), 2, 4, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62), 1, 8, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68), 2, 4, None, NamedColors.red.value, None, None)", "2, 3, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69), 2, 4,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C), 1, 13, None, NamedColors.magenta.value, None, None)", "14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67), 1, 15, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66), 1, 6, None, NamedColors.cyan.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56), 2, 3, 12, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73), 2, 15, 4, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_8(self):", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74), 2, 6, 8, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A), 1, 15, 20, None, None, None)", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49), 1, 5, None, NamedColors.red.value,", "2, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53), 1, 3, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53), 2, 5, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73),", "6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 
0x47), 1, 7, None,", "0x79), 1, 2, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78), 1, 8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58),", "0x67), 2, 4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47), 2,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76), 1, 8, 12, None,", "15, None, NamedColors.magenta.value, None, None) def test_scc_pac_magenta_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41), 1, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51), 1, 9, 0, None, None,", "3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A), 1, 4, 20,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71), 2, 13, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57), 1, 12, 12, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52), 1, 5, 4, None, None,", "14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A), 2, 15, None,", "6, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59), 1, 7, 16,", "None, NamedColors.cyan.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46), 2, 11, None, NamedColors.cyan.value,", "0x4A), 1, 14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E), 2, 10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E),", "1, 14, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79), 1, 15,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76), 1, 8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69), 2, 15, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40), 2, 7, None, NamedColors.white.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B), 1, 5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52), 2, 14, 4, None, None, None)", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48), 1, 5, None, NamedColors.red.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D), 1, 6, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47), 1, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "12, None, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76), 1, 13, 12, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56), 2, 14, 12, None, None, None)", "0x5A), 1, 5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A), 1,", "0x44), 2, 11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F), 2, 4, 28, None, None,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D), 2, 6, None, NamedColors.magenta.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54), 1, 3, 8, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51), 2, 11, 0, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C), 2, 13, None, NamedColors.magenta.value, None, None)", "0x7C), 1, 4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B), 1, 14, 20, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72), 2, 15, 4, None, None, None) def", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64), 2, 13, None, NamedColors.blue.value,", "2, 5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C), 2, 6,", "0x7E), 2, 15, 28, None, None, None) def test_scc_pac_indent_28_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51), 2, 5, 0, None,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B), 2, 8, None, NamedColors.yellow.value,", "0x79), 2, 10, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44), 1, 11, None, NamedColors.blue.value, None, None)", "0x71), 1, 15, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60), 1, 10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B), 1, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A), 2, 1, 20, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A), 2, 4, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A),", "10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F), 1, 11, None,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45), 1, 9, None, NamedColors.blue.value, None,", "1, 9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73), 1, 10,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E), 2, 8, None, NamedColors.white.value,", "None, 
None) def test_scc_pac_indent_4_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53), 1, 1, 4, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40), 2, 3, None, NamedColors.white.value, None, None)", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42), 1, 12, None, NamedColors.green.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C), 2, 1, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C),", "0x51), 2, 11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51), 2,", "0x5A), 1, 14, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A), 1,", "2, 9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79), 2, 10,", "11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49), 2, 12, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50), 2, 12, 0, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68), 2, 4, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48),", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54), 2, 5, 8, None,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) def test_scc_pac_white_italics(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E), 1, 1,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41), 2, 12, None, NamedColors.white.value, None,", "None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71), 2, 10, 0, None, None,", "2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E), 2, 3, 28,", "0x4A), 2, 9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A), 2,", "0x63), 2, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True)) def test_scc_pac_blue(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76), 2, 4, 12, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44), 1, 5, None, NamedColors.blue.value,", "10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D), 2, 11, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56), 2, 1, 12, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F), 2, 2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F),", "None, None, None) def test_scc_pac_indent_12_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57), 1, 1, 12,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40), 2, 7, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60), 2, 13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54), 1, 12, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D), 2, 10, None, NamedColors.magenta.value, 
None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D), 1, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D),", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B), 2, 4, None, NamedColors.yellow.value, None,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D), 1, 12, 24, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47), 2, 9, None, NamedColors.cyan.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47), 1, 14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "test_scc_pac_white_italics(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E), 1, 1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "2, 9, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73), 2, 10,", "0x7E), 1, 6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E), 1,", "0x52), 1, 11, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56), 1, 14, 12, None, None, None)", "1, 14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E), 1, 15,", "9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D), 1, 10, 24,", "2, 7, None, 
NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60), 2, 8,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49), 1, 11, None, NamedColors.red.value,", "0x61), 2, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41), 2,", "def test_scc_pac_white_italics_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F), 1, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "2, 5, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C), 2, 6,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B), 2, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B), 1, 8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B),", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F), 2, 2, 28, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60), 1, 15, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40),", "2, 7, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F), 2, 8,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48), 2, 7, None, NamedColors.red.value,", "2, 5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71), 2, 6,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78), 2, 15, 16, None, None, None) def", 
"NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61), 1, 15, None, NamedColors.white.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B), 1, 5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B),", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52), 1, 11, 4, None,", "1, 7, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60), 1, 8,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42), 1, 7, None, NamedColors.green.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D), 2, 13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56), 1, 3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C), 2, 3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A), 1, 14, 20, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A), 2, 15, None, NamedColors.yellow.value, None, None) def test_scc_pac_yellow_underline(self):", "for b2 in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in channel_2_byte_1:", "7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D), 1, 8, None,", "0x4F), 1, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C), 2, 3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x45), 2, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x65),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E), 2, 12, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "2, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58), 1, 3, 16,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6D), 2, 15, None, NamedColors.magenta.value, None,", "2, 5, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73), 2, 6,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73), 1, 6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53),", "None) def test_scc_pac_green_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43), 1, 1, None, NamedColors.green.value, None,", "2, 8, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55), 2, 9,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79), 1, 8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A), 2, 8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A),", "10, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E), 1, 11, 
28,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x65), 2, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76), 2, 6, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B), 1, 12, 20,", "2, 9, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E), 2, 10,", "2, 6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x51), 2, 7,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C), 2, 8, None, NamedColors.magenta.value, None,", "0x4F), 1, 9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F), 2, 13, 28, None, None,", "0x77), 2, 4, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71), 2, 4, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71), 1, 10, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51),", "0x5F), 2, 12, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77), 2, 10, 12, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D), 1, 8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x64), 2, 2, None, NamedColors.blue.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63), 2, 13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "1, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43), 1, 7,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67), 2, 10, None, NamedColors.cyan.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C), 1, 10, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C),", "1, 5, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79), 1, 6,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D), 1, 3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73), 1, 4, 4, None, None, TextDecorationType(underline=True))", "6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45), 1, 7, None,", "0x69), 1, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68), 2, 15, None, NamedColors.red.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62), 1, 2, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D), 2, 4, 24, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59), 1, 5, 16, None, None,", "2, 3, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63), 2, 4,", "0x5A), 1, 12, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A), 1,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C), 2, 15, 24, None,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65), 1, 6, None, NamedColors.blue.value, None,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75), 1, 6, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F), 2, 8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F), 2, 13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51), 2, 9, 0, None, None,", "None, NamedColors.magenta.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D), 1, 8, None, NamedColors.magenta.value,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D), 2, 15, 24, None,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65), 1, 10, None, NamedColors.blue.value, None,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49), 2, 9, None, NamedColors.red.value, None,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42), 2, 14, None, NamedColors.green.value,", "14, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75), 2, 15, 8,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60), 2, 13, None, NamedColors.white.value,", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41), 2, 9, None, NamedColors.white.value,", "13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F), 1, 14, 28,", "test_scc_pac_white_italics_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F), 1, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56), 2, 7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x45), 
2, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C), 1, 6, None, NamedColors.magenta.value, None,", "0x4F), 1, 3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F), 1,", "2, 6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48), 2, 7,", "1, 13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D), 1, 14,", "0x73), 1, 15, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54), 2, 12, 8, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50), 2, 12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "0x7B), 2, 8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B), 2,", "1, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C), 2, 2, None,", "13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40), 1, 14, None,", "2, 7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B), 2, 8,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60), 1, 2, None, NamedColors.white.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64), 1, 15, None, NamedColors.blue.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E), 2, 14, 
28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "2, 9, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72), 2, 10,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74), 2, 4, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E), 2, 15, None, NamedColors.white.value, FontStyleType.italic, None) def test_scc_pac_white_italics_underline(self):", "0x7B), 1, 10, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B), 1,", "def test_scc_pac_indent_4_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53), 1, 1, 4, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B), 2, 10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B),", "font_style, text_decoration): self.assertEqual(channel, pac.get_channel()) self.assertEqual(row, pac.get_row()) self.assertEqual(indent, pac.get_indent()) self.assertEqual(color, pac.get_color())", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71), 2, 4, 0, None,", "2, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A), 2, 3, 20,", "2, 13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47), 2, 14,", "2, 1, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C), 2, 2,", "2, 4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A), 2, 5,", "5, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D), 2, 6, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79), 2, 13, 16, None, None, TextDecorationType(underline=True))", "2, 8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47), 2, 9,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78), 2, 8, 16, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E), 2, 10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45), 2, 9, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44), 2, 12, None, NamedColors.blue.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C), 2, 4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D), 2, 12, 24, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F), 1, 4, 28, None, None,", "1, 11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51), 1, 12,", "2, 2, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43), 2, 3,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58), 2, 12, 16, None, None, None)", "if item not in channel_1_byte_1 and item not in channel_2_byte_1]", "None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76), 2, 8, 12, None, None, None)", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62), 1, 4, None, NamedColors.green.value, None,", "2, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67), 2, 8,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58), 2, 3, 16, None,", "0x71), 2, 15, 0, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_4(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E), 2, 10, 28, None, None, None)", "12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70), 2, 13, 0,", "2, 12, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70), 2, 13,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F), 1, 5, 28, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68), 2, 8, None, NamedColors.red.value, None, None)", "13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E), 1, 14, None,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56), 2, 12, 12, None,", "2, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42), 1, 3, None,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43), 1, 9, None, NamedColors.green.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71), 2, 10, 0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D), 2, 14, 24, None, None,", "0x5F), 2, 3, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F), 2,", "2, 12, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71), 2, 13,", "0x60), 2, 13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40), 2,", "15, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46), 2, 1, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53), 2, 3, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A), 1, 13, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53), 2, 9, 4, None, None,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50), 2, 7, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47), 2, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69), 2, 8, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54), 1, 11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55), 1, 9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57), 1, 1, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77),", "1, 11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C), 1, 12,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52), 1, 9, 4, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B), 1, 8, 20, None, None,", "[item for item in all_range if item not in channel_1_byte_1", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65), 1, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45),", "13, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50), 1, 14, 0,", "12, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66), 1, 13, None,", "0x4F), 2, 7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F), 2,", "1, 9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71), 1, 10,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58), 1, 7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44), 2, 9, None,", "2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40), 2, 3, None,", "None, 
NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46), 2, 1, None, NamedColors.cyan.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56), 1, 14, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52), 2, 5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43), 2, 1, None, NamedColors.green.value, None,", "2, 2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E), 2, 3,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F), 1, 11, 28, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F), 2, 14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "0x7E), 2, 2, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43), 1, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63),", "use in source and binary forms, with or without #", "8, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41), 2, 9, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68), 2, 13, None, NamedColors.red.value, None, None)", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55), 2, 7, 8, None,", "NamedColors.magenta.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D), 1, 7, None, NamedColors.magenta.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67), 2, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41), 1, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A), 1, 15, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A),", "0x46), 1, 3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E), 2, 5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "2, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) def test_scc_pac_yellow(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A),", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57), 2, 3, 12, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C), 2, 8, None, NamedColors.magenta.value, None, None)", "0x58), 2, 11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58), 2,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55), 1, 7, 8, None,", "9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x61), 1, 10, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C), 1, 7, 24, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73), 1, 8, 4, None, None, TextDecorationType(underline=True))", "11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D), 2, 12, None,", "WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74), 2, 8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60), 1, 10, None, NamedColors.white.value, None, None)", "2, 10, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55), 2, 11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61), 1, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41),", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A), 2, 5, None, NamedColors.yellow.value,", "b1 in channel_2_byte_1: for b2 in byte_2_range: pac = SccPreambleAddressCode.find(b1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55), 1, 5, 8, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53), 2, 11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51), 1, 3, 0, None, None,", "0x50), 2, 3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70), 2,", "10, 0, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51), 1, 11, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A), 2, 13, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A),", "4, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57), 2, 5, 12,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47), 1, 14, None, NamedColors.cyan.value,", "None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_16(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58), 1, 1, 16,", "0x43), 1, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C), 1, 8, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C),", "0x56), 2, 1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59), 2, 14, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A), 2, 12, 20, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71), 2, 15, 0, None, None,", "7, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78), 1, 8, 16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55), 1, 11, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61), 1, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42), 1, 1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46), 2, 7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66),", "28, None, None, None) def test_scc_pac_indent_28_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F), 1, 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76), 1, 15, 12, None, None,", "0x55), 2, 9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75), 2,", "0x48), 1, 5, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68), 1,", "1, 11, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56), 1, 12,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72), 2, 13, 4, None,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D), 1, 7, None, NamedColors.magenta.value,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A), 2, 1, None, NamedColors.yellow.value,", "1, 5, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64), 1, 6,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40), 2, 1, None, NamedColors.white.value, None, None)", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 
0x6D), 2, 15, None, NamedColors.magenta.value,", "0x71), 1, 6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51), 1,", "0x47), 1, 14, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67), 1,", "0x50), 2, 14, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55), 1, 7, 8, None, None,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75), 2, 15, 8, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44), 1, 3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F), 2, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F),", "2, 9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76), 2, 10,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F), 2, 13, 28, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77), 1, 8, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57),", "0x45), 2, 14, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65), 2,", "2, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C), 1, 3, 24,", "2, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41), 2, 14,", "0x49), 2, 12, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69), 2,", "7, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64), 2, 8, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47), 2, 12, None, NamedColors.cyan.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E), 2, 8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E),", "0x72), 2, 10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52), 2,", "0x5A), 2, 11, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47), 1, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67),", "0x4D), 1, 14, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D), 1,", "1, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F), 1, 7,", "0x66), 1, 10, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71), 2, 15, 0, None, None, TextDecorationType(underline=True)) def", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41), 1, 9, None, NamedColors.white.value,", "2, 11, None, 
NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B), 2, 12,", "1, 15, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C), 2, 1,", "3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70), 1, 4, 0,", "0x59), 1, 1, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79), 1,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56), 2, 11, 12, None,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44), 1, 3, None, NamedColors.blue.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41), 2, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B), 1, 8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "2, 1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73), 2, 2,", "4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F), 1, 5, None,", "4, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47), 1, 5, None,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67), 2, 13, None, NamedColors.cyan.value,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42), 2, 5, None, NamedColors.green.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 
0x70), 2, 6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79), 2, 8, 16, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C), 1, 14, None, NamedColors.magenta.value, None, None)", "2, 1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62), 2, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C), 1, 2, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C),", "self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in other_bytes_1: for b2 in range(0x00,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F), 2, 6, None, NamedColors.white.value, FontStyleType.italic,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42), 1, 9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62),", "0x73), 1, 6, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53), 1,", "0x42), 1, 5, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73), 1, 8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49), 1, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "test_scc_pac_indent_28_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F), 1, 1, 28, 
None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B), 1, 11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47), 2, 5, None, NamedColors.cyan.value, None,", "13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57), 2, 14, 12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D), 1, 10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75), 2, 2, 8, None, None,", "2, 14, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C), 2, 15,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A), 1, 5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41), 2, 11, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59), 2, 9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "1, 5, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73), 1, 6,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D), 2, 11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44), 1, 14, None, NamedColors.blue.value, None, None)", "1, 7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A), 1, 8,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C), 2, 3, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57), 1, 7, 12, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A), 1, 8, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51), 1, 7, 0,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45), 2, 9, None, NamedColors.blue.value, None,", "TextDecorationType(underline=True)) def test_scc_pac_green(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42), 1, 1, None, NamedColors.green.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51), 1, 7, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "test_scc_pac_indent_28(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E), 1, 1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B), 1, 13, 20, None, None,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45), 2, 12, None, 
NamedColors.blue.value, None,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F), 2, 14, None, NamedColors.white.value, FontStyleType.italic,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E), 2, 12, 28, None, None, None)", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45), 2, 3, None, NamedColors.blue.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42), 2, 1, None, NamedColors.green.value, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52), 1, 11, 4, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41), 1, 14, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61),", "Redistributions in binary form must reproduce the above copyright notice,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A), 1, 5, None, NamedColors.yellow.value, None, None)", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E), 1, 14, 28, None,", "0x75), 2, 2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55), 2,", "11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D), 2, 12, 24,", "self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in channel_2_byte_1: for b2 in byte_2_range:", "1, 8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56), 1, 9,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44), 2, 3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "2, 5, None, 
NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D), 2, 6,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C), 2, 10, 24, None, None,", "10, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56), 2, 11, 12,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C), 2, 7, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "1, 14, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61), 1, 15,", "13, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40), 2, 14, None,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E), 2, 8, 28, None,", "1, 3, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A), 1, 4,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51), 2, 1, 0, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63), 2, 10, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69), 2, 10, None, NamedColors.red.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53), 2, 12, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74), 1, 4, 8, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56), 2, 7, 12, None, None, None)", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63), 1, 10, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67), 2, 8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47),", "0x4E), 1, 14, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E), 1,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78), 1, 15, 16, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51), 1, 11, 0, None, None, TextDecorationType(underline=True))", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41), 2, 11, None, NamedColors.white.value,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A), 2, 10, 20, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5C), 1, 14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F), 2, 3, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41), 1, 11, None, NamedColors.white.value,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F), 1, 4, None, NamedColors.white.value, 
FontStyleType.italic,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74), 2, 10, 8, None, None,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F), 1, 3, None, NamedColors.white.value, FontStyleType.italic,", "else: self.assertIsNotNone(pac) for b2 in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1", "0x7C), 2, 10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F), 1, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F),", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46), 1, 9, None, NamedColors.cyan.value,", "3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66), 2, 4, None,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B), 1, 4, None, NamedColors.yellow.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51), 2, 11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52), 1, 7, 4, None,", "2, 7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D), 2, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55), 1, 12, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "0x4B), 2, 3, None, NamedColors.yellow.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B), 2,", "1, 10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53), 1, 11,", "12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F), 1, 13, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D), 2, 3, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E), 2, 5, 28, None, None,", "0x5F and b1 % 0x08 == 0: # row 11", "2, 10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54), 2, 11,", "1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66), 1, 2, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61), 1, 6, None, NamedColors.white.value, None,", "2, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59), 1, 3, 16,", "None, None, None) def test_scc_pac_indent_24_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D), 1, 1, 24,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D), 2, 10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E), 2, 2, None, NamedColors.white.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65), 1, 6, None, 
NamedColors.blue.value, None, TextDecorationType(underline=True))", "5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72), 2, 6, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59), 1, 14, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54), 2, 1, 8, None, None, None)", "2, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49), 2, 14,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43), 1, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "11, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45), 2, 12, None,", "1, 2, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46), 1, 3,", "# Copyright (c) 2020, Sandflow Consulting LLC # # Redistribution", "following disclaimer. # 2. 
Redistributions in binary form must reproduce", "15, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C), 2, 1, None,", "0x49), 1, 3, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69), 1,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66), 1, 13, None, NamedColors.cyan.value, None,", "class SCCPreambleAddressCodesTest(unittest.TestCase): def test_scc_pac_values(self): channel_1_byte_1 = [0x11, 0x12, 0x15, 0x16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58), 2, 9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64), 1, 6, None, NamedColors.blue.value, None,", "TextDecorationType(underline=True)) def test_scc_pac_indent_8(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54), 1, 1, 8, None, None,", "14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F), 2, 15, 28,", "14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C), 1, 15, 24,", "14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71), 2, 15, 0,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B), 2, 2, 20, None, None,", "0x4B), 2, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43), 2, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63),", "None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B), 1, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56), 2, 11, 12, None, None,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C), 1, 10, 24, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73), 2, 15, 4, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77), 1, 4, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57),", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52), 2, 9, 4, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C), 2, 2, 24, None, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76), 2, 6, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56),", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55), 1, 11, 8, None,", "0x45), 1, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65), 1,", "0x74), 1, 10, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54), 1,", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\"\"\"Unit tests for", "2, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61), 2, 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71), 1, 6, 0, None, None, TextDecorationType(underline=True))", "0x7E), 1, 4, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E), 1,", "12, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68), 1, 13, None,", "1, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60), 1, 2, None,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65), 2, 10, None, NamedColors.blue.value, None,", "7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70), 2, 8, 0,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A), 2, 12, None, NamedColors.yellow.value,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53), 1, 7, 4, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75), 2, 8, 8, None, None, TextDecorationType(underline=True))", "0x61), 2, 6, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41), 2,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62), 1, 15, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59), 2, 1, 16, None,", "FontStyleType.italic, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E), 2, 14, None, NamedColors.white.value, FontStyleType.italic, None)", "2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D), 2, 3, None,", "1, 9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74), 1, 10,", "0x60), 1, 6, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40), 1,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F), 1, 12, 28, None,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66), 2, 2, None, NamedColors.cyan.value, None,", "1, 9, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A), 1, 10,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56), 2, 14, 12, None, None,", "2, 13, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44), 2, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B), 1, 11, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B),", "0x6D), 2, 6, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D), 2,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) def test_scc_pac_indent_0(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50), 1, 1,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42), 1, 14, None, NamedColors.green.value, None,", "2, 11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46), 2, 12,", 
"TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41), 2, 3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C), 1, 9, 24, None,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49), 1, 12, None, NamedColors.red.value, None,", "1, 7, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B), 1, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C), 2, 3, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C),", "2, 4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50), 2, 5,", "0x57), 1, 14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C), 1, 6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76), 2, 8, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40), 1, 7, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B), 2, 3, 20, None, None, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x59), 1, 12, 16, None, None,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D), 2, 2, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "HOLDERS AND CONTRIBUTORS \"AS IS\" AND # ANY EXPRESS OR", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76), 1, 6, 12, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B), 1, 13, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x4E), 2, 11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42), 2, 12, None, NamedColors.green.value, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59), 2, 3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57), 1, 14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x4B), 1, 11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B), 1,", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE #", "0x46), 1, 11, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46), 1,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E), 1, 13, None, NamedColors.white.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63), 1, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "2, 13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E), 2, 14,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47), 2, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "def test_scc_pac_green(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42), 1, 1, None, NamedColors.green.value, None, None)", "0x4D), 2, 7, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D), 2,", "0x59), 1, 11, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x59), 1,", "0x58), 1, 5, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74), 1, 15, 8, None, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75), 1, 15, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77), 1, 15, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42), 2, 3, None, NamedColors.green.value, None, None)", "14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71), 1, 15, 0,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67), 1, 15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "2, 12, 20, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B), 2, 13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C), 2, 6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C),", "0x60), 1, 15, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40), 2,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E), 2, 11, None, NamedColors.white.value, FontStyleType.italic, None)", "0x5C), 2, 14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C), 2,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B), 1, 3, 20, None,", "0x10, 0x13, 0x14] channel_2_byte_1 = [0x19, 0x1A, 0x1D, 0x1E, 0x1F,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40), 1, 14, None, NamedColors.white.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D), 1, 11, 24, None, None, TextDecorationType(underline=True))", "1, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x61), 1, 10,", "2, 5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E), 2, 6,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50), 2, 1, 0, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E), 2, 3, 28, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69), 2, 15, None, NamedColors.red.value, None, TextDecorationType(underline=True)) def test_scc_pac_yellow(self):", "None, None) def test_scc_pac_cyan_underline(self): 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47), 1, 1, None, NamedColors.cyan.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C), 2, 13, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "2, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D), 2, 14,", "in channel_1_byte_1: for b2 in byte_2_range: pac = SccPreambleAddressCode.find(b1, b2)", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C), 1, 9, None, NamedColors.magenta.value,", "1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F), 1, 2, None,", "7, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69), 2, 8, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B), 1, 14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D), 1, 15, 24, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C), 1, 5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67), 2, 8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54), 1, 11, 8, None, None, None)", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D), 1, 14, 24, None, None,", 
"NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63), 2, 6, None, NamedColors.green.value, None,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x65), 2, 8, None, NamedColors.blue.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D), 2, 8, 24, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B), 1, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44), 1, 3, None, NamedColors.blue.value, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E), 1, 8, 28, None, None, None)", "5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69), 2, 6, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70), 1, 13, 0, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43), 1, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53), 1, 9, 4, None, None, TextDecorationType(underline=True))", "2, 3, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C), 2, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63), 1, 10, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "2, 8, None, NamedColors.magenta.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C), 2, 9,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75), 1, 8, 8, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59), 1, 14, 16, None, None, TextDecorationType(underline=True))", "1, 14, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E), 1, 15,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D), 1, 3, None, NamedColors.magenta.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66), 2, 10, None, NamedColors.cyan.value, None, None)", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59), 1, 9, 16, None,", "0x64), 2, 2, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44), 2,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65), 1, 8, None, NamedColors.blue.value, None,", "3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6E), 1, 4, None,", "4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D), 1, 5, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E), 2, 2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66), 1, 2, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46),", "1, 12, 20, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B), 1, 13,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63), 2, 8, None, NamedColors.green.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x73), 2, 8, 4, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B), 2, 5, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55), 1, 12, 8, None, None, TextDecorationType(underline=True))", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F), 1, 5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A), 1, 10, 20, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56), 2, 9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47), 1, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B), 1, 1, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B),", "0x7B), 1, 8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B), 1,", "2, 15, 28, None, None, TextDecorationType(underline=True)) if __name__ 
== '__main__':", "7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67), 2, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55), 1, 14, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C), 2, 14, None, NamedColors.magenta.value, None, None)", "1, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78), 1, 2, 16,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51), 1, 5, 0, None,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C), 1, 2, 24, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79), 2, 2, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74), 1, 2, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78), 2, 15, 16, None, None, None) def test_scc_pac_indent_16_underline(self):", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71), 1, 8, 0, None, None,", "for b1 in other_bytes_1: for b2 in range(0x00, 0xFF): self.assertIsNone(SccPreambleAddressCode.find(b1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77), 2, 10, 12, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50), 2, 5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x70),", "0x6D), 1, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D), 1,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6E), 1, 4, None, NamedColors.white.value, FontStyleType.italic,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E), 2, 9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "0x4D), 1, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E), 1, 1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D), 1, 13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "13, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C), 2, 14, 24,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40), 2, 11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "1, 5, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F), 1, 6,", "1, 12, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C), 1, 13,", "2, 8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59), 2, 9,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46), 2, 9, None, NamedColors.cyan.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79), 2, 15, 16, None, None, TextDecorationType(underline=True))", 
"None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50), 2, 5, 0, None, None,", "channel_2_byte_1 = [0x19, 0x1A, 0x1D, 0x1E, 0x1F, 0x18, 0x1B, 0x1C]", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41), 2, 12, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B), 1, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B),", "2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F), 2, 3, None,", "2, 14, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F), 2, 15,", "0x68), 1, 6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48), 1,", "1, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43), 1, 12,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E), 1, 12, None, NamedColors.white.value,", "pac = SccPreambleAddressCode.find(b1, b2) if b2 > 0x5F and b1", "0x41), 1, 9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x61), 1,", "7, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73), 1, 8, 4,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B), 2, 12, 20, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F), 1, 7, 28, None, 
None, TextDecorationType(underline=True))", "2, 12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64), 2, 13,", "1, 12, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62), 1, 13,", "0x6A), 1, 15, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75), 2, 10, 8, None, None,", "1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76), 1, 2, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C), 1, 9, None, NamedColors.magenta.value, None, None)", "1, 11, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42), 1, 12,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F), 2, 1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B), 1, 11, 20, None, None,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43), 1, 7, None, NamedColors.green.value, None,", "1, 15, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B), 2, 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56), 1, 5, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76),", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D), 1, 10, 24, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D), 2, 6, None, 
NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D),", "1, 1, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70), 1, 2,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D), 2, 4, 24, None, None, TextDecorationType(underline=True))", "2, 6, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40), 2, 7,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B), 1, 4, None, NamedColors.yellow.value, None,", "SCC PACs\"\"\" # pylint: disable=R0201,C0115,C0116 import unittest from ttconv.scc.codes.preambles_address_codes import", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72), 1, 10, 4, None, None, None)", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68), 2, 2, None, NamedColors.red.value, None,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66), 1, 6, None, NamedColors.cyan.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53), 2, 14, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B), 1, 4, 20, None, None, TextDecorationType(underline=True))", "0x66), 2, 10, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A), 2, 15, 20, None, None, None) def test_scc_pac_indent_20_underline(self):", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 
0x45), 2, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B), 2, 7, 20, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D), 1, 11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B), 1, 15, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52), 1, 5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41), 1, 14, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78), 1, 10, 16, None,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D), 2, 2, 24, None,", "2, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F), 2, 11,", "notice, # this list of conditions and the following disclaimer", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76), 1, 13, 12, None, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F), 2, 5, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "14, None, NamedColors.blue.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64), 1, 15, None,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42), 2, 11, None, NamedColors.green.value,", "# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73), 1, 6, 4, None, None, TextDecorationType(underline=True))", "10, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A), 2, 11, 20,", "0x44), 2, 9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F), 1, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67), 1, 8, None, NamedColors.cyan.value,", "2, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F), 2, 7,", "12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72), 2, 13, 4,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C), 2, 7, 24, None, None, None)", "1, 13, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48), 1, 14,", "0x7C), 1, 8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C), 1,", "NamedColors.blue.value, None, None) def test_scc_pac_blue_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45), 1, 1, None,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47), 2, 7, None, NamedColors.cyan.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59), 1, 7, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79),", "in range(0x00, 0xFF): self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) def check_scc_pac_attributes(self, pac, channel, row,", "2, 5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60), 2, 6,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71), 1, 2, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F), 1, 11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54), 2, 7, 8, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49), 1, 7, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F), 1, 14, None, NamedColors.white.value,", "1, 5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72), 1, 6,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79), 2, 15, 16, None, None, TextDecorationType(underline=True)) def", "9, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E), 1, 10, None,", "1, 5, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E), 1, 6,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D), 1, 7, 24, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A), 1, 3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6A),", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A), 2, 13, None, NamedColors.yellow.value,", "2, 2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56), 2, 3,", "8, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C), 2, 9, 24,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B), 2, 8, 20, None, None,", "0x78), 2, 6, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58), 2,", "2, 12, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A), 2, 13,", "1, 15, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C), 2, 1,", "2, 11, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A), 2, 12,", "1, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65), 1, 8,", "0x63), 2, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74), 2, 8, 8, None, None,", "0x7A), 1, 13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A), 1,", "10, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44), 1, 11, None,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51), 2, 14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71),", "0x66), 1, 13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46), 1,", "14, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73), 1, 15, 4,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C), 1, 11, 24, None, None,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49), 1, 14, None, NamedColors.red.value, None,", "2, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41), 2, 11,", "None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_28(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E), 1, 1, 28,", "Redistribution and use in source and binary forms, with or", "code must retain the above copyright notice, this # list", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B), 1, 14, None, NamedColors.yellow.value, None,", "0x41), 2, 14, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61), 2,", "0x49), 1, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49), 1,", "1, 13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E), 1, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64), 2, 10, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44),", "28, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E), 2, 9, 28, None,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52), 2, 3, 4, None,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48), 1, 12, None, NamedColors.red.value, None,", "8, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55), 1, 9, 8,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E), 2, 8, None, NamedColors.white.value, FontStyleType.italic,", "2, 6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C), 2, 7,", "USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE", "0x64), 2, 10, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77), 1, 6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75), 2, 8, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55),", "2, 13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D), 2, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42), 2, 7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57), 2, 12, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "NamedColors.yellow.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A), 1, 11, None, NamedColors.yellow.value, None,", "10, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50), 2, 11, 0,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76), 2, 2, 12, None, None, None)", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F), 2, 3, 28, None,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E), 2, 11, None, NamedColors.white.value,", "1, 6, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46), 1, 7,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E), 1, 12, 28, None, None,", "1, 14, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69), 1, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F), 1, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56), 2, 12, 12, None, None,", "1, 2, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C), 1, 3,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59), 2, 3, 16, None, None, TextDecorationType(underline=True))", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B), 1, 7, 20, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B), 1, 6, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "2, 4, 
None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47), 2, 5,", "NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63), 1, 2, None, NamedColors.green.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E), 2, 3, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E),", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41), 1, 5, None, NamedColors.white.value, None,", "0x76), 1, 13, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56), 1,", "2, 2, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D), 2, 3,", "1, 3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74), 1, 4,", "None) def test_scc_pac_white_italics_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F), 1, 1, None, NamedColors.white.value, FontStyleType.italic,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4B), 1, 7, None, NamedColors.yellow.value, None,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F), 1, 10, 28, None,", "0x67), 2, 8, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47), 2,", "0x51), 1, 9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71), 1,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) def test_scc_pac_cyan(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 
0x46), 1, 1, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54), 1, 11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A), 1, 10, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "0x6D), 1, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D), 2,", "12, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E), 1, 13, None,", "0x49), 2, 3, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A), 2, 7, None, NamedColors.yellow.value, None, None)", "2, 13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E), 2, 14,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A), 1, 6, 20, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A), 2, 10, None, NamedColors.yellow.value, None, None)", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62), 2, 6, None, NamedColors.green.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E), 1, 10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E),", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66), 2, 4, None, NamedColors.cyan.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68), 2, 2, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", 
"0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51), 2, 9, 0, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42), 1, 3, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62),", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F), 2, 3, None, NamedColors.white.value, FontStyleType.italic,", "2, 13, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42), 2, 14,", "1, 9, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F), 1, 10,", "9, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79), 2, 10, 16,", "2, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A), 1, 3, 20,", "2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B), 2, 3, None,", "0x72), 2, 2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52), 2,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69), 1, 10, None, NamedColors.red.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D), 2, 12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x66), 1, 8, None, NamedColors.cyan.value,", "14, None, NamedColors.red.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68), 2, 15, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E), 2, 5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72), 2, 6, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "1, 13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A), 1, 14,", "2, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46), 1, 3, None,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45), 1, 9, None, NamedColors.blue.value,", "None, None) def test_scc_pac_indent_20_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B), 1, 1, 20, None,", "0x42), 2, 7, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62), 2,", "0x58), 2, 12, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78), 2,", "9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71), 2, 10, 0,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52), 2, 7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x5E), 2, 12, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E), 2,", "2, 10, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49), 2, 11,", "1, 3, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60), 1, 4,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D), 1, 9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C), 2, 12, 24, None, None, None)", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62), 2, 13, None, NamedColors.green.value, None, None)", "15, 12, None, None, None) def test_scc_pac_indent_12_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57), 1,", "6, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54), 2, 7, 8,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F), 2, 8, None, NamedColors.white.value, FontStyleType.italic,", "9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75), 1, 10, 8,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E), 2, 13, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "0x75), 1, 2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78), 2, 4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75), 1, 10, 8, None,", "9, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61), 2, 10, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43), 1, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63),", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61), 1, 15, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6A), 1, 10, None, NamedColors.yellow.value, None, None)", "0x6B), 1, 4, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B), 1,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F), 1, 5, 28, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50), 1, 11, 0, None, None, None)", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D), 2, 14, 24, None,", "0x48), 1, 11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48), 1,", "1, 4, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E), 1, 5,", "1, 1, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69), 1, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54), 1, 12, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77), 2, 6, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x51), 2, 7, 0,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65), 1, 10, None, NamedColors.blue.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51), 2, 1, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D), 1, 7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D),", "0x6B), 2, 6, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B), 2,", "1, 12, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64), 1, 13,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57), 2, 11, 12, None, None, TextDecorationType(underline=True))", "15, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43), 2, 1, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C), 1, 5, 24, None, None,", "0x61), 1, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41), 1,", "0x4F), 1, 7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F), 1,", "1, 7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D), 1, 8,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74), 2, 2, 8, None, None, None)", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66), 2, 8, None, NamedColors.cyan.value,", "OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
CAUSED AND # ON", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68), 1, 15, None, NamedColors.red.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72), 2, 8, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52),", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68), 1, 6, None, NamedColors.red.value, None,", "4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45), 2, 5, None,", "0x56), 1, 12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76), 1,", "test_scc_pac_indent_24_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D), 1, 1, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x67), 1, 10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47), 1,", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58), 1, 3, 16, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B), 1, 10, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A), 1, 9, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46), 2, 3, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65), 1, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "0x6A), 1, 13, None, NamedColors.yellow.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B), 1, 7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B),", "12, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79), 2, 13, 16,", "def test_scc_pac_indent_24(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C), 1, 1, 24, None, None, None)", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F), 2, 9, None, NamedColors.white.value,", "1, 3, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61), 1, 4,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43), 1, 9, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "2, 5, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62), 2, 6,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50), 2, 14, 0, None,", "0x78), 1, 10, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71), 1, 6, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79), 1, 13, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "0x6E), 1, 15, None, NamedColors.white.value, FontStyleType.italic, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D), 2, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47), 1, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "1, 8, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58), 1, 9,", "0x77), 2, 15, 12, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_16(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "2, 6, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E), 2, 7,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56), 2, 7, 12, None, None,", "b1 in other_bytes_1: for b2 in range(0x00, 0xFF): self.assertIsNone(SccPreambleAddressCode.find(b1, b2))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75), 1, 10, 8, None, None, TextDecorationType(underline=True))", "5, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73), 2, 6, 4,", "15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) def test_scc_pac_indent_0(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50), 1,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44), 2, 14, None, NamedColors.blue.value,", "reproduce the above copyright notice, # this list of conditions", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60), 2, 6, None, NamedColors.white.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x6E), 2, 13, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E), 2,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F), 1, 10, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60), 1, 2, None, NamedColors.white.value, None,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x43), 2, 14, None, NamedColors.green.value,", "TextDecorationType(underline=True)) def test_scc_pac_indent_20(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A), 1, 1, 20, None, None,", "13, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43), 1, 14, None,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49), 1, 14, None, NamedColors.red.value,", "1, 11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54), 1, 12,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C), 2, 1, None, NamedColors.magenta.value, None,", "0x74), 1, 15, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54), 2,", "13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B), 2, 14, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65), 2, 15, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "0x49), 1, 12, None, 
NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69), 1,", "6, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46), 1, 7, None,", "1, 12, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71), 1, 13,", "8, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46), 1, 9, None,", "13, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59), 1, 14, 16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65), 1, 8, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "unittest from ttconv.scc.codes.preambles_address_codes import SccPreambleAddressCode from ttconv.style_properties import TextDecorationType, NamedColors,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48), 1, 7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60), 2, 4, None, NamedColors.white.value, None,", "8, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55), 2, 9, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43), 1, 7, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "2, 13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52), 2, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B), 2, 5, 20, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B),", "14, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77), 1, 15, 12,", "1, 11, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C), 1, 12,", "12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63), 1, 13, None,", "None, NamedColors.red.value, None, None) def test_scc_pac_red_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49), 1, 1,", "11, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54), 1, 12, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59), 1, 3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61), 1, 8, None, NamedColors.white.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72), 2, 13, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "def test_scc_pac_white(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40), 1, 1, None, NamedColors.white.value, None, None)", "are permitted provided that the following conditions are met: #", "1, 2, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40), 1, 3,", "None, None) def test_scc_pac_indent_16_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59), 1, 1, 16, None,", "2, 1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74), 2, 2,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F), 1, 14, 
28, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F), 2, 9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49), 2, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69),", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48), 1, 7, None, NamedColors.red.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63), 2, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "2, 12, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69), 2, 13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A), 2, 4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A),", "0x7D), 1, 8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D), 1,", "NamedColors.cyan.value, None, None) def test_scc_pac_cyan_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47), 1, 1, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70), 2, 8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50),", "15, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C), 2, 1, 24,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D), 2, 4, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B), 2, 13, 20, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52), 2, 12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72),", "0x54), 2, 3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74), 2,", "0x5D), 2, 5, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D), 2,", "NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48), 2, 1, None, NamedColors.red.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D), 2, 9, 24, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43), 2, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A), 1, 9, None, NamedColors.yellow.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B), 1, 9, 20, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C), 2, 1, 24, None, None, None)", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C), 1, 11, None, NamedColors.magenta.value, None,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66), 2, 10, None, NamedColors.cyan.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56), 1, 3, 12, None, None, None)", "0x4A), 1, 3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 
0x6A), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D), 1, 2, 24, None, None,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B), 1, 14, 20, None,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D), 1, 12, None, NamedColors.magenta.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B), 2, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "6, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E), 1, 7, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77), 2, 2, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66), 2, 10, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70), 1, 6, 0, None, None, None)", "0x70), 1, 6, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50), 1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70), 1, 8, 0, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48), 2, 11, None, NamedColors.red.value, None, None)", "1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B), 2, 2, 20,", "0x65), 2, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x45), 2,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43), 2, 1, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59), 1, 5, 16, None,", "0x41), 2, 5, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61), 2,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E), 1, 11, None, NamedColors.white.value, FontStyleType.italic, None)", "0x46), 2, 7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66), 2,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63), 2, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True)) def", "1, 5, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77), 1, 6,", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "0x74), 1, 13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54), 1,", "0x70), 1, 4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x50), 1,", "2, 14, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C), 2, 15,", "2, 1, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70), 2, 2,", "0x61), 2, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41), 2,", "11, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C), 1, 12, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D), 2, 7, None, 
NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40), 2, 12, None, NamedColors.white.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D), 2, 10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65), 1, 4, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C), 1, 2, None, NamedColors.magenta.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41), 1, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54), 1, 7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63), 2, 4, None, NamedColors.green.value,", "0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x50), 1, 5, 0, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59), 1, 5, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "1, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41), 1, 3,", "2, 7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72), 2, 
8,", "and the following disclaimer in the documentation # and/or other", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A), 2, 10, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "10, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52), 2, 11, 4,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67), 2, 15, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) def test_scc_pac_red(self):", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40), 1, 9, None, NamedColors.white.value,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57), 2, 7, 12, None, None,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53), 1, 11, 4, None,", "0x4C), 1, 14, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F), 1, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44), 1, 5, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F), 1, 4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F),", "10, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40), 2, 11, None,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52), 2, 5, 4, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A), 1, 3, 20, None, None, None)", "None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54), 1, 9, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "test_scc_pac_indent_12(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56), 1, 1, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "2, 12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76), 2, 13,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58), 2, 14, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "11, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F), 2, 12, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60), 1, 4, None, NamedColors.white.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44), 2, 9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47), 2, 5, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A), 1, 7, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A),", "0x43), 2, 3, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77), 2, 6, 12, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C), 1, 11, 24, None, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74), 1, 2, 8, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54),", "2, 6, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47), 2, 7,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C), 2, 10, None, NamedColors.magenta.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57), 1, 11, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "12, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D), 2, 13, None,", "1, 3, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69), 1, 4,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48), 1, 3, None, NamedColors.red.value,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79), 2, 2, 16, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C), 1, 2, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73), 1, 8, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79), 1, 10, 16, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42), 1, 14, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62),", "0x67), 1, 2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47), 1,", "2, 2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52), 2, 3,", "2, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67), 2, 4,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53), 2, 7, 4, None, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E), 2, 4, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "0x60), 2, 6, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40), 2,", "11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51), 2, 12, 0,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C), 2, 12, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C),", "0x49), 1, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69), 1,", "0x5F), 1, 11, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F), 1,", "0x48), 2, 9, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68), 2,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C), 1, 5, None, NamedColors.magenta.value, None,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D), 1, 6, None, NamedColors.magenta.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76), 1, 4, 12, None, None, None)", "1, None, 
NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D), 2, 2, None,", "0x6E), 1, 6, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E), 1,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45), 2, 11, None, NamedColors.blue.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59), 1, 7, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D), 1, 12, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67), 1, 10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "1, 13, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44), 1, 14,", "2, 14, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F), 2, 15,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40), 1, 12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D), 1, 10, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "0x72), 1, 2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52), 1,", "0x74), 2, 6, 8, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54), 2,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71), 2, 8, 0, None,", "28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F), 2, 5, 28, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78), 1, 2, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56), 2, 3, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76),", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44), 2, 1, None, NamedColors.blue.value, None,", "0x69), 1, 2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49), 1,", "0x41), 1, 1, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50), 2, 7, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "color, font_style, text_decoration): self.assertEqual(channel, pac.get_channel()) self.assertEqual(row, pac.get_row()) self.assertEqual(indent, pac.get_indent()) self.assertEqual(color,", "0x7B), 1, 15, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B), 2,", "9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B), 1, 10, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72), 2, 15, 4, None, None, None)", "1, 1, None, NamedColors.blue.value, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65), 1, 2,", "9, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76), 1, 10, 12,", "3, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72), 2, 4, 4,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46), 1, 5, None, NamedColors.cyan.value,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F), 1, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "0x53), 1, 1, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B), 1, 9, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "15, 4, None, None, TextDecorationType(underline=True)) def test_scc_pac_indent_8(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54), 1,", "0x40), 1, 12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60), 1,", "0x77), 1, 8, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57), 1,", "0x49), 1, 9, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69), 1,", "2, 4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40), 2, 5,", "7, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E), 1, 8, None,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B), 1, 12, 20, None,", 
"0x6E), 2, 10, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E), 2,", "11, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C), 2, 12, 24,", "6, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D), 2, 7, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C), 2, 2, None, NamedColors.magenta.value, None, None)", "9, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C), 1, 10, 24,", "5, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63), 2, 6, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60), 2, 13, None, NamedColors.white.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D), 2, 1, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55), 1, 5, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75),", "2, 3, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E), 2, 4,", "2, 13, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57), 2, 14,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70), 2, 13, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46), 1, 14, None, NamedColors.cyan.value, None,", "1, 2, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59), 1, 3,", 
"None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5F), 2, 1, 28, None, None,", "3, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70), 2, 4, 0,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B), 2, 3, None, NamedColors.yellow.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A), 1, 6, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A),", "1, 12, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63), 1, 13,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53), 2, 3, 4, None, None, TextDecorationType(underline=True))", "1, 13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B), 1, 14,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61), 2, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41),", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58), 1, 7, 16, None,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F), 1, 4, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74), 1, 8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40), 1, 3, None, NamedColors.white.value,", "2, 12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57), 2, 3, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52), 2, 5, 4, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61), 1, 13, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72), 2, 15, 4,", "2, 4, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57), 2, 5,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47), 1, 1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79), 1, 8, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59),", "2, 10, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46), 2, 11,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53), 1, 3, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66), 1, 15, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "2, 6, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55), 2, 7,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71), 2, 8, 0, None, None, TextDecorationType(underline=True))", "11, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x59), 1, 12, 16,", "0x53), 1, 3, 4, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50), 2, 7, 0, None, None, None)", "2, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55), 1, 3, 8,", "2, 11, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D), 2, 12,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58), 1, 14, 16, None, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53), 1, 14, 4, None, None, TextDecorationType(underline=True))", "2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47), 1, 3, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62), 1, 13, None, NamedColors.green.value, None, None)", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49), 1, 11, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x47), 1, 7, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63), 1, 10, None, NamedColors.green.value,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D), 1, 13, 24, None,", "0x7C), 2, 6, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C), 2,", "11 case self.assertIsNone(pac) else: 
self.assertIsNotNone(pac) for b2 in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x61), 1, 10, None, NamedColors.white.value,", "GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS;", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43), 1, 5, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D), 2, 8, None, NamedColors.magenta.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A), 1, 5, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A), 2, 12, None, NamedColors.yellow.value, None, None)", "None) def test_scc_pac_magenta_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D), 1, 1, None, NamedColors.magenta.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C), 2, 6, 24, None, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50), 1, 12, 0, None, None,", "1, 13, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F), 1, 14,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C), 2, 6, None, NamedColors.magenta.value, None, None)", "byte_2_range = range(0x40, 0x80) other_bytes_1 = [item for item in", "0x74), 1, 2, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54), 1,", "0x64), 1, 13, None, NamedColors.blue.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C), 1, 9, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F), 2, 10, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F),", "2, 12, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75), 2, 13,", "1, 1, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E), 1, 2,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48), 1, 14, None, NamedColors.red.value,", "1, 8, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54), 1, 9,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6E), 1, 4, None, NamedColors.white.value, FontStyleType.italic, None)", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73), 1, 2, 4, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41), 2, 5, None, NamedColors.white.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45), 1, 5, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57), 1, 9, 12, None, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60), 2, 13, None, NamedColors.white.value, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41), 1, 7, 
None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49), 1, 5, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54), 1, 14, 8, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52), 2, 1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C), 1, 4, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79), 1, 4, 16, None,", "2, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58), 2, 3, 16,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A), 1, 8, 20, None, None,", "b1 in channel_1_byte_1: for b2 in byte_2_range: pac = SccPreambleAddressCode.find(b1,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E), 2, 1, 28, None, None,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60), 1, 10, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59), 2, 3, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79),", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49), 1, 3, None, NamedColors.red.value, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61), 2, 8, None, 
NamedColors.white.value, None,", "None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A), 1, 6, None, NamedColors.yellow.value,", "0x48), 2, 7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48), 1, 11, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48),", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48), 2, 1, None, NamedColors.red.value,", "4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73), 1, 10, 4, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E), 2, 9, 28, None, None,", "2, 13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5A), 2, 14,", "other_bytes_1 = [item for item in all_range if item not", "16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78), 2, 6, 16, None,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40), 2, 5, None, NamedColors.white.value, None,", "1, 5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C), 1, 6,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F), 2, 5, 28, None, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61), 2, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41),", "2, 4, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B), 2, 5,", "FontStyleType.italic, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F), 2, 8, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "2, 3, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78), 2, 4,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E), 1, 3, 28, None, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E), 2, 1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79), 2, 13, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59),", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60), 1, 15, None, NamedColors.white.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58), 2, 11, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B,", "13, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54), 1, 14, 8,", "0x6B), 1, 15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B), 2,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E), 1, 5, None, NamedColors.white.value,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68), 1, 6, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B), 2, 2, None, NamedColors.yellow.value, None,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F), 1, 4, None, NamedColors.white.value,", "12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60), 2, 13, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57), 1, 7, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77),", "0x4C), 2, 5, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C), 2,", "2, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True)) def test_scc_pac_blue(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44),", "3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71), 2, 4, 0,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57), 2, 3, 12, None, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51), 1, 3, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D), 2, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D),", "2, 4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46), 2, 5,", "5, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76), 2, 6, 12,", "2, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43), 2, 9,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40), 2, 14, None, NamedColors.white.value, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60),", "BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65), 2, 6, None, NamedColors.blue.value, None,", "10, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46), 2, 11, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73), 1, 10, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61), 2, 2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "1, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F), 2, 2, 28,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45), 2, 1, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B), 1, 6, 20, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78), 1, 6, 16, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A), 2, 3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "1, 4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59), 1, 5,", "1, 9, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78), 1, 10,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 
0x54), 2, 14, 8, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B), 1, 14, 20, None, None, TextDecorationType(underline=True))", "1, 5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A), 1, 6,", "2, 6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E), 2, 7,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D), 2, 7, None, NamedColors.magenta.value,", "following conditions are met: # # 1. Redistributions of source", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65), 1, 10, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "0x5B), 1, 1, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45), 1, 7, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65),", "1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75), 2, 2, 8,", "2, 12, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E), 2, 13,", "AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A), 2, 15, None, NamedColors.yellow.value, None, None) def", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E), 1, 9, None, NamedColors.white.value,", "10, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45), 2, 11, None,", "4, 20, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B), 2, 5, 20,", "1, 8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F), 1, 9,", "0x5B), 1, 12, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47), 1, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B), 1, 2, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B),", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E), 1, 6, None, NamedColors.white.value,", "1, 9, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64), 1, 10,", "from ttconv.scc.codes.preambles_address_codes import SccPreambleAddressCode from ttconv.style_properties import TextDecorationType, NamedColors, FontStyleType", "9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70), 2, 10, 0,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B), 2, 13, None, NamedColors.yellow.value, None, TextDecorationType(underline=True))", "0x55), 1, 1, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75), 1,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46), 2, 14, None, NamedColors.cyan.value, None, None)", "0x50), 2, 9, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70), 2,", "0x7C), 1, 2, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C), 1,", "None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C), 1, 10, 24, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E), 2, 7, 28, None, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E), 2, 8, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54), 2, 11, 8, None, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70), 1, 10, 0, None, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75), 1, 10, 8, None, None,", "of conditions and the following disclaimer. # 2. Redistributions in", "12, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72), 1, 13, 4,", "0x53), 2, 5, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73), 2,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55), 2, 12, 8, None, None,", "7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E), 2, 8, 28,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D), 2, 8, 24, None,", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44), 1, 7, None, NamedColors.blue.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57), 1, 12, 12, None, None, TextDecorationType(underline=True))", "in other_bytes_2: self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) for b1 in channel_2_byte_1: for b2", "2, 14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B), 2, 15,", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B), 2, 2, None, NamedColors.yellow.value,", "SccPreambleAddressCode from ttconv.style_properties import TextDecorationType, NamedColors, FontStyleType class SCCPreambleAddressCodesTest(unittest.TestCase): def", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F), 2, 13, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F),", "3, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63), 2, 4, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B), 2, 15, 20, None, None, TextDecorationType(underline=True)) def", "1, 8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50), 1, 9,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F), 2, 1, None, NamedColors.white.value, FontStyleType.italic,", "1, 2, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F), 1, 3,", "2, 1, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66), 2, 2,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46), 2, 11, None, NamedColors.cyan.value, None,", "1, 7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F), 1, 8,", "2, 8, 12, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57), 2, 9,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54), 2, 3, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74), 2, 6, 8, None, None, None)", "8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D), 2, 9, 24,", "1, 15, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43), 2, 1,", "0x6C), 1, 2, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4C), 1,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56), 2, 14, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40), 1, 11, None, NamedColors.white.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70), 1, 4, 0, None, None, None)", "2, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49), 1, 3, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69), 2, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "1, 1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E), 1, 2,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E), 1, 6, None, NamedColors.white.value, FontStyleType.italic,", "with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY", "8, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C), 1, 9, None,", "1, 5, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62), 1, 6,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45), 1, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47), 1, 3, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C), 1, 7, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59), 1, 9, 16, None, None, TextDecorationType(underline=True))", "1, 12, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60), 1, 13,", "1, 7, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72), 1, 8,", "NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40), 2, 3, None, NamedColors.white.value, None,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56), 2, 3, 12, None, None,", "0x42), 1, 1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62), 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41), 2, 14, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "1, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F), 2, 2, None,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79), 1, 8, 16, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79), 1, 15, 16, None, None,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49), 2, 1, None, NamedColors.red.value, None,", "24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D), 1, 3, 24, None,", "test_scc_pac_green(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42), 1, 1, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E), 2, 5, None, NamedColors.white.value, FontStyleType.italic,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51), 1, 9, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "permitted provided that the following conditions are met: # #", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77), 1, 2, 12, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44), 2, 11, None, NamedColors.blue.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77), 1, 15, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53), 2, 7, 4, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B), 1, 7, 20, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C), 2, 15, None, NamedColors.magenta.value, None, None)", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71), 1, 13, 0, None, None,", "4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59), 2, 5, 16,", "4, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51), 2, 5, 0,", "0x47), 2, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67), 2,", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60), 2, 6, None, NamedColors.white.value,", "0x55), 1, 12, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43), 1, 5, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "0x51), 1, 11, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51), 1,", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45), 1, 14, None, NamedColors.blue.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70), 2, 4, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50),", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72), 2, 13, 4, None, None,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C), 1, 10, 24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C),", "pac.get_indent()) self.assertEqual(color, pac.get_color()) self.assertEqual(font_style, pac.get_font_style()) self.assertEqual(text_decoration, pac.get_text_decoration()) def test_scc_pac_white(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0x66), 1, 4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46), 1,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46), 2, 5, None, NamedColors.cyan.value,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67), 2, 6, None, NamedColors.cyan.value,", "from ttconv.style_properties import TextDecorationType, NamedColors, FontStyleType class SCCPreambleAddressCodesTest(unittest.TestCase): def test_scc_pac_values(self):", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64), 1, 13, None, NamedColors.blue.value, None, None)", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42), 1, 9, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70), 1, 2, 0, None, None,", "2, 8, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50), 2, 9,", "14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B), 1, 15, 20,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E), 2, 5, 28, None,", "2, 8, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D), 2, 9,", "FontStyleType.italic, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F), 2, 12, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x61), 1, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41),", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68), 1, 10, None, NamedColors.red.value,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A), 1, 14, None, NamedColors.yellow.value, None,", "1, 1, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x72), 1, 2,", "None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F), 2, 12, None, NamedColors.white.value,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76), 2, 4, 12, None, None,", "2, 1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E), 2, 2,", "2, 4, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D), 2, 5,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60), 1, 4, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41), 1, 7, None, NamedColors.white.value,", "4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58), 1, 5, 16,", "2, 6, None, NamedColors.green.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43), 2, 7,", "4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53), 1, 5, 4,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74), 2, 13, 8, None,", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F), 1, 8, None, NamedColors.white.value, FontStyleType.italic,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73), 1, 4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79), 1, 15, 16, None, None, TextDecorationType(underline=True))", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A), 2, 13, 20, None, None, None)", "14, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B), 2, 15, None,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C), 1, 12, 24, None,", "0x7E), 2, 8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E), 2,", "# this list of conditions and the following disclaimer in", "2, 5, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79), 2, 6,", "0x56), 2, 12, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51), 1, 14, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71),", "other_bytes_1: for b2 in range(0x00, 0xFF): self.assertIsNone(SccPreambleAddressCode.find(b1, b2)) def 
check_scc_pac_attributes(self,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C), 1, 2, None, NamedColors.magenta.value, None, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66), 2, 6, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69), 1, 4, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58), 1, 7, 16, None, None, None)", "8, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E), 2, 9, 28,", "4, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59), 1, 5, 16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x78), 1, 4, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43), 1, 11, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E), 1, 5, None, NamedColors.white.value, FontStyleType.italic, None)", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63), 1, 4, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71), 1, 13, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14,", "TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B), 2, 4, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42), 1, 12, None, NamedColors.green.value,", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46), 1, 7, None, NamedColors.cyan.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48), 1, 12, None, NamedColors.red.value, None, None)", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C), 2, 11, None, NamedColors.magenta.value, None,", "1, 13, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A), 1, 14,", "11, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B), 1, 12, None,", "12, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61), 1, 13, None,", "4, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42), 1, 5, None,", "\"\"\"Unit tests for the SCC PACs\"\"\" # pylint: disable=R0201,C0115,C0116 import", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C), 1, 7, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77), 1, 13, 12, None, None,", "# pylint: disable=R0201,C0115,C0116 import unittest from ttconv.scc.codes.preambles_address_codes import SccPreambleAddressCode from", "2, 4, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 
0x44), 2, 5,", "2, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D), 2, 9,", "1, 3, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64), 1, 4,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52), 2, 14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64), 1, 13, None, NamedColors.blue.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78), 1, 10, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C), 2, 12, None, NamedColors.magenta.value, None, None)", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C), 1, 4, None, NamedColors.magenta.value,", "11, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40), 1, 12, None,", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67), 2, 2, None, NamedColors.cyan.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C), 1, 4, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61), 2, 10, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49), 1, 3, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65), 2, 13, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D), 1, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D),", "None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) def test_scc_pac_red(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48), 1, 1,", "None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41), 1, 12, None, NamedColors.white.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E), 2, 6, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E),", "1, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67), 2, 2, None,", "None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69), 1, 6, None, NamedColors.red.value,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57), 1, 12, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E), 2, 14, None, NamedColors.white.value, FontStyleType.italic,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C), 1, 7, None, NamedColors.magenta.value,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55), 2, 5, 8, None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55), 2, 1, 8, None, None, TextDecorationType(underline=True))", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A), 1, 11, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65), 1, 6, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45),", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A), 1, 8, 20, None, None, None)", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48), 1, 11, None, NamedColors.red.value,", "0x42), 1, 3, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62), 1,", "1, 4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53), 1, 5,", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5C), 1, 14, 24, None,", "0x64), 1, 8, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44), 1,", "0x55), 1, 9, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75), 1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51), 1, 12, 0, None, None,", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D), 1, 2, None, NamedColors.magenta.value,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55), 1, 11, 8, None, None, TextDecorationType(underline=True))", "8, None, 
None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54), 1, 14, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D), 1, 9, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70), 2, 15, 0, None, None, None) def test_scc_pac_indent_0_underline(self):", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66), 2, 6, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "15, 28, None, None, None) def test_scc_pac_indent_28_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F), 1,", "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "in all_range if item not in list(byte_2_range)] for b1 in", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46), 1, 11, None, NamedColors.cyan.value, None, None)", "NamedColors.magenta.value, None, None) def test_scc_pac_magenta_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D), 1, 1, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45), 1, 9, None, NamedColors.blue.value, None, TextDecorationType(underline=True))", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E), 1, 15, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50), 1, 14, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70),", "13, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D), 1, 14, 24,", "0x7F), 1, 2, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F), 
1,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79), 1, 4, 16, None, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54), 1, 5, 8, None, None, None)", "None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B), 2, 14, None, NamedColors.yellow.value,", "5, 0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71), 2, 6, 0,", "text_decoration): self.assertEqual(channel, pac.get_channel()) self.assertEqual(row, pac.get_row()) self.assertEqual(indent, pac.get_indent()) self.assertEqual(color, pac.get_color()) self.assertEqual(font_style,", "7, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74), 1, 8, 8,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41), 2, 7, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E,", "0x61), 1, 4, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B), 1, 7, 20, None, None, TextDecorationType(underline=True))", "# modification, are permitted provided that the following conditions are", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A), 2, 4, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D,", "0x76), 1, 4, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56), 1,", "0x43), 1, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63), 1,", "1, 1, 12, None, 
None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77), 1, 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A), 1, 13, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B), 2, 5, 20, None, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B), 1, 7, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66), 1, 4, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "1, 3, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F), 1, 4,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A), 2, 14, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44), 2, 12, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52), 1, 14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72),", "7, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D), 1, 8, 24,", "20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A), 2, 13, 20, None,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72), 2, 10, 4, None,", "14, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76), 2, 15, 12,", "None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75), 1, 15, 8, None, None,", "1, 12, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D), 1, 13,", "0x73), 2, 4, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53), 2,", "15, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52), 2, 1, 4,", "2, 7, None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68), 2, 8,", "NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47), 1, 14, None, NamedColors.cyan.value, None,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46), 2, 9, None, NamedColors.cyan.value, None, None)", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41), 2, 3, None, NamedColors.white.value, None, TextDecorationType(underline=True))", "NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46), 2, 1, None, NamedColors.cyan.value, None,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77), 2, 4, 12, None,", "2, 11, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D), 2, 12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44), 1, 11, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44),", "0x42), 2, 5, None, NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62), 2,", "12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76), 2, 2, 12, None,", "None, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D), 2, 3, 24, None, None,", "2, None, NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41), 1, 3, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69), 1, 6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49),", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66), 2, 13, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C,", "1, 14, None, NamedColors.green.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63), 1, 15,", "None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46), 1, 3, None, NamedColors.cyan.value,", "0x4A), 2, 7, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A), 2,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58), 2, 5, 16, None, None,", "self.assertEqual(color, pac.get_color()) self.assertEqual(font_style, pac.get_font_style()) self.assertEqual(text_decoration, pac.get_text_decoration()) def test_scc_pac_white(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40),", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E), 2, 1, 28, None,", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES", "10, None, NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49), 1, 11, None,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59), 1, 11, 16, None,", "8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54), 1, 
11, 8, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55), 1, 7, 8, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75),", "0x4E), 2, 1, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E), 2,", "1, 2, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A), 1, 3,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53), 1, 5, 4, None, None,", "FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F), 2, 6, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True))", "6, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C), 2, 7, None,", "15, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) def test_scc_pac_magenta(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C), 1,", "5, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x70), 2, 6, 0,", "0x5A), 2, 5, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B), 2, 6, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55), 2, 14, 8, None, None,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69), 1, 6, None, NamedColors.red.value, None,", "3, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D), 2, 4, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57), 1, 3, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A), 1, 10, 20, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A),", "4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F), 2, 5, 28,", "this list of conditions and the following disclaimer in the", "0x7F), 1, 8, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F), 1,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40), 1, 5, None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63), 1, 8, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45), 2, 9, None, NamedColors.blue.value,", "1, 2, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52), 1, 3,", "1, 15, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E), 2, 1,", "0x64), 1, 15, None, NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44), 2,", "None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C), 2, 3, None, NamedColors.magenta.value,", "13, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53), 1, 14, 4,", "24, None, None, None) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C), 2, 10, 24, None,", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46), 1, 7, None, NamedColors.cyan.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16,", "8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A), 2, 9, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E), 1, 4, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E),", "def test_scc_pac_values(self): channel_1_byte_1 = [0x11, 0x12, 0x15, 0x16, 0x17, 0x10,", "0: # row 11 case self.assertIsNone(pac) else: self.assertIsNotNone(pac) for b2", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62), 1, 8, None, NamedColors.green.value, None, None)", "NamedColors.blue.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64), 2, 13, None, NamedColors.blue.value, None,", "0x6D), 2, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) def test_scc_pac_white_italics(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11,", "0, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51), 2, 14, 0, None,", "2, 14, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B), 2, 15,", "2, 5, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74), 2, 6,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C), 1, 8, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72), 1, 8, 4, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E), 2, 
11, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C), 2, 15, 24, None, None, None) def test_scc_pac_indent_24_underline(self):", "NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F), 2, 5, None, NamedColors.white.value, FontStyleType.italic,", "None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E), 2, 3, None, NamedColors.white.value,", "11, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53), 2, 12, 4,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x59), 1, 12, 16, None, None, TextDecorationType(underline=True))", "2, 9, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A), 2, 10,", "1, 0, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70), 1, 2, 0,", "None, NamedColors.red.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48), 2, 9, None, NamedColors.red.value,", "2, 9, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65), 2, 10,", "28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E), 1, 5, 28, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D), 2, 13, None, NamedColors.magenta.value, None, TextDecorationType(underline=True))", "None, NamedColors.white.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40), 1, 7, None, NamedColors.white.value,", "15, None, NamedColors.green.value, None, 
TextDecorationType(underline=True)) def test_scc_pac_blue(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44), 1,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67), 1, 2, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "1, 15, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57), 2, 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B), 1, 8, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "def check_scc_pac_attributes(self, pac, channel, row, indent, color, font_style, text_decoration): self.assertEqual(channel,", "13, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47), 2, 14, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57), 2, 9, 12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A), 2, 3, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A),", "1, 7, 28, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E), 1, 8,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D), 1, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52), 2, 14, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53), 1, 12, 4, None, None,", 
"self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E), 2, 6, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B), 2, 3, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B),", "0x6C), 2, 2, None, NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C), 2,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A), 2, 8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A),", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45), 2, 3, None, NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A,", "2, 15, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) def test_scc_pac_indent_0(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50),", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B), 1, 9, 20, None, None,", "4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52), 1, 14, 4, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B), 2, 1, 20, None, None,", "NamedColors.white.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41), 1, 14, None, NamedColors.white.value, None,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F), 2, 4, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B), 1, 12, 20, None, None, 
TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B),", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52), 2, 5, 4, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72),", "None) def test_scc_pac_indent_24_underline(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D), 1, 1, 24, None, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69), 2, 13, None, NamedColors.red.value, None, TextDecorationType(underline=True))", "2, 7, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76), 2, 8,", "1, 9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F), 1, 10,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79), 1, 15, 16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59),", "1, 15, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D), 2, 1,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F), 1, 9, 28, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "0x47), 1, 12, None, NamedColors.cyan.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67), 1,", "1, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74), 2, 2, 8,", "16, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59), 2, 7, 16, None,", "1, 8, None, NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A), 1, 9,", "self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F), 1, 
7, None, NamedColors.white.value, FontStyleType.italic, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F),", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67), 1, 10, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "0x6E), 2, 2, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E), 2,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56), 2, 1, 12, None, None, None)", "python # -*- coding: UTF-8 -*- # Copyright (c) 2020,", "NamedColors.green.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62), 2, 13, None, NamedColors.green.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47), 2, 11, None, NamedColors.cyan.value, None, TextDecorationType(underline=True))", "OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED", "Sandflow Consulting LLC # # Redistribution and use in source", "None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D), 1, 10, None, NamedColors.magenta.value,", "None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46), 2, 5, None, NamedColors.cyan.value, None, None)", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A), 1, 10, 20, None, None,", "0x73), 1, 2, 4, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53), 1,", "# -*- coding: UTF-8 -*- # Copyright (c) 2020, Sandflow", "1, 8, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D), 1, 9,", "6, None, NamedColors.red.value, None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49), 2, 7, None,", "not in channel_1_byte_1 and item not in channel_2_byte_1] other_bytes_2 =", "1, 6, 8, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54), 1, 7,", "NamedColors.magenta.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C), 1, 9, None, NamedColors.magenta.value, None,", "2, 6, 20, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B), 2, 7,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45), 1, 12, None, NamedColors.blue.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78), 2, 10, 16, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18,", "NamedColors.red.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49), 2, 5, None, NamedColors.red.value, None,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43), 2, 3, None, NamedColors.green.value, None, TextDecorationType(underline=True))", "TextDecorationType(underline=True)) def test_scc_pac_magenta(self): self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C), 1, 1, None, NamedColors.magenta.value, None,", "None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B), 2, 11, 20, None, None,", "NamedColors.blue.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45), 2, 1, None, NamedColors.blue.value, None,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A), 2, 5, None, NamedColors.yellow.value, None,", "None, TextDecorationType(underline=True)) 
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79), 1, 13, 16, None, None, TextDecorationType(underline=True))", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B), 2, 4, 20, None, None, TextDecorationType(underline=True))", "24, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C), 2, 7, 24, None,", "12, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77), 1, 10, 12, None,", "TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D), 1, 9, None, NamedColors.magenta.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17,", "None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71), 2, 4, 0, None, None, TextDecorationType(underline=True))", "8, None, NamedColors.yellow.value, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B), 1, 9, None,", "NamedColors.yellow.value, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A), 2, 8, None, NamedColors.yellow.value, None,", "None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E), 1, 5, None, NamedColors.white.value, FontStyleType.italic, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x15,", "2, 10, 24, None, None, TextDecorationType(underline=True)) self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D), 2, 11,", "None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74), 2, 2, 8, None, None,", "2, 12, None, None, None) self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56), 1, 3, 12," ]
[ "The image name. :type image_name: str :param sys_prep: Indicates whether", "VHD. :param image_name: The image name. :type image_name: str :param", "the custom image (i.e. Windows, Linux). Possible values include: 'Windows',", "'image_name': {'key': 'imageName', 'type': 'str'}, 'sys_prep': {'key': 'sysPrep', 'type': 'bool'},", "the MIT License. See License.txt in the project root for", "from a VHD. :param image_name: The image name. :type image_name:", "creating a custom image from a VHD. :param image_name: The", "-------------------------------------------------------------------------- from msrest.serialization import Model class CustomImagePropertiesCustom(Model): \"\"\"Properties for creating", "image (i.e. Windows, Linux). Possible values include: 'Windows', 'Linux', 'None'", "the project root for # license information. # # Code", "_validation = { 'os_type': {'required': True}, } _attribute_map = {", "{ 'image_name': {'key': 'imageName', 'type': 'str'}, 'sys_prep': {'key': 'sysPrep', 'type':", "Generator. # Changes may cause incorrect behavior and will be", "'sys_prep': {'key': 'sysPrep', 'type': 'bool'}, 'os_type': {'key': 'osType', 'type': 'str'},", "OS type of the custom image (i.e. Windows, Linux). Possible", "'None' :type os_type: str or ~azure.mgmt.devtestlabs.models.CustomImageOsType \"\"\" _validation = {", "\"\"\"Properties for creating a custom image from a VHD. :param", "-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. #", "{ 'os_type': {'required': True}, } _attribute_map = { 'image_name': {'key':", "_attribute_map = { 'image_name': {'key': 'imageName', 'type': 'str'}, 'sys_prep': {'key':", "license information. # # Code generated by Microsoft (R) AutoRest", "\"\"\" _validation = { 'os_type': {'required': True}, } _attribute_map =", "a VHD. :param image_name: The image name. :type image_name: str", "by Microsoft (R) AutoRest Code Generator. 
# Changes may cause", "Changes may cause incorrect behavior and will be lost if", "behavior and will be lost if the code is #", "def __init__(self, os_type, image_name=None, sys_prep=None): super(CustomImagePropertiesCustom, self).__init__() self.image_name = image_name", "incorrect behavior and will be lost if the code is", "'os_type': {'required': True}, } _attribute_map = { 'image_name': {'key': 'imageName',", "run on the VHD. :type sys_prep: bool :param os_type: The", "<reponame>NMijat1024/azure-sdk-for-python # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation.", ":type os_type: str or ~azure.mgmt.devtestlabs.models.CustomImageOsType \"\"\" _validation = { 'os_type':", "MIT License. See License.txt in the project root for #", "AutoRest Code Generator. # Changes may cause incorrect behavior and", "image_name: The image name. :type image_name: str :param sys_prep: Indicates", "'osType', 'type': 'str'}, } def __init__(self, os_type, image_name=None, sys_prep=None): super(CustomImagePropertiesCustom,", "image name. :type image_name: str :param sys_prep: Indicates whether sysprep", "may cause incorrect behavior and will be lost if the", "the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import", "The OS type of the custom image (i.e. Windows, Linux).", "project root for # license information. # # Code generated", "Model class CustomImagePropertiesCustom(Model): \"\"\"Properties for creating a custom image from", "See License.txt in the project root for # license information.", "'type': 'str'}, } def __init__(self, os_type, image_name=None, sys_prep=None): super(CustomImagePropertiesCustom, self).__init__()", "# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed", "been run on the VHD. :type sys_prep: bool :param os_type:", "type of the custom image (i.e. Windows, Linux). 
Possible values", "Linux). Possible values include: 'Windows', 'Linux', 'None' :type os_type: str", "True}, } _attribute_map = { 'image_name': {'key': 'imageName', 'type': 'str'},", "sys_prep: bool :param os_type: The OS type of the custom", "Possible values include: 'Windows', 'Linux', 'None' :type os_type: str or", "# -------------------------------------------------------------------------- from msrest.serialization import Model class CustomImagePropertiesCustom(Model): \"\"\"Properties for", "generated by Microsoft (R) AutoRest Code Generator. # Changes may", "name. :type image_name: str :param sys_prep: Indicates whether sysprep has", "class CustomImagePropertiesCustom(Model): \"\"\"Properties for creating a custom image from a", "in the project root for # license information. # #", "'type': 'str'}, 'sys_prep': {'key': 'sysPrep', 'type': 'bool'}, 'os_type': {'key': 'osType',", "reserved. # Licensed under the MIT License. See License.txt in", "super(CustomImagePropertiesCustom, self).__init__() self.image_name = image_name self.sys_prep = sys_prep self.os_type =", "'Windows', 'Linux', 'None' :type os_type: str or ~azure.mgmt.devtestlabs.models.CustomImageOsType \"\"\" _validation", "sys_prep: Indicates whether sysprep has been run on the VHD.", "whether sysprep has been run on the VHD. :type sys_prep:", "image from a VHD. :param image_name: The image name. :type", "__init__(self, os_type, image_name=None, sys_prep=None): super(CustomImagePropertiesCustom, self).__init__() self.image_name = image_name self.sys_prep", "os_type, image_name=None, sys_prep=None): super(CustomImagePropertiesCustom, self).__init__() self.image_name = image_name self.sys_prep =", "# # Code generated by Microsoft (R) AutoRest Code Generator.", "regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class CustomImagePropertiesCustom(Model): \"\"\"Properties", "os_type: The OS type of the custom image (i.e. 
Windows,", "= { 'image_name': {'key': 'imageName', 'type': 'str'}, 'sys_prep': {'key': 'sysPrep',", "Corporation. All rights reserved. # Licensed under the MIT License.", "custom image (i.e. Windows, Linux). Possible values include: 'Windows', 'Linux',", "# Licensed under the MIT License. See License.txt in the", "custom image from a VHD. :param image_name: The image name.", ":param image_name: The image name. :type image_name: str :param sys_prep:", "# Changes may cause incorrect behavior and will be lost", "'imageName', 'type': 'str'}, 'sys_prep': {'key': 'sysPrep', 'type': 'bool'}, 'os_type': {'key':", "msrest.serialization import Model class CustomImagePropertiesCustom(Model): \"\"\"Properties for creating a custom", "~azure.mgmt.devtestlabs.models.CustomImageOsType \"\"\" _validation = { 'os_type': {'required': True}, } _attribute_map", "sysprep has been run on the VHD. :type sys_prep: bool", "# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved.", ":type image_name: str :param sys_prep: Indicates whether sysprep has been", "# regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class CustomImagePropertiesCustom(Model):", "VHD. :type sys_prep: bool :param os_type: The OS type of", "Code generated by Microsoft (R) AutoRest Code Generator. # Changes", "information. # # Code generated by Microsoft (R) AutoRest Code", "coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights", "from msrest.serialization import Model class CustomImagePropertiesCustom(Model): \"\"\"Properties for creating a", "License. 
See License.txt in the project root for # license", "image_name=None, sys_prep=None): super(CustomImagePropertiesCustom, self).__init__() self.image_name = image_name self.sys_prep = sys_prep", "will be lost if the code is # regenerated. #", "= { 'os_type': {'required': True}, } _attribute_map = { 'image_name':", "lost if the code is # regenerated. # -------------------------------------------------------------------------- from", "a custom image from a VHD. :param image_name: The image", ":type sys_prep: bool :param os_type: The OS type of the", "and will be lost if the code is # regenerated.", "for creating a custom image from a VHD. :param image_name:", "is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class", "code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model", "import Model class CustomImagePropertiesCustom(Model): \"\"\"Properties for creating a custom image", "of the custom image (i.e. Windows, Linux). Possible values include:", "under the MIT License. See License.txt in the project root", "'str'}, 'sys_prep': {'key': 'sysPrep', 'type': 'bool'}, 'os_type': {'key': 'osType', 'type':", "'type': 'bool'}, 'os_type': {'key': 'osType', 'type': 'str'}, } def __init__(self,", "{'key': 'osType', 'type': 'str'}, } def __init__(self, os_type, image_name=None, sys_prep=None):", ":param sys_prep: Indicates whether sysprep has been run on the", "'os_type': {'key': 'osType', 'type': 'str'}, } def __init__(self, os_type, image_name=None,", "cause incorrect behavior and will be lost if the code", "(c) Microsoft Corporation. All rights reserved. # Licensed under the", "All rights reserved. # Licensed under the MIT License. See", "self).__init__() self.image_name = image_name self.sys_prep = sys_prep self.os_type = os_type", "Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect", "or ~azure.mgmt.devtestlabs.models.CustomImageOsType \"\"\" _validation = { 'os_type': {'required': True}, }", "bool :param os_type: The OS type of the custom image", "{'key': 'imageName', 'type': 'str'}, 'sys_prep': {'key': 'sysPrep', 'type': 'bool'}, 'os_type':", "has been run on the VHD. :type sys_prep: bool :param", "os_type: str or ~azure.mgmt.devtestlabs.models.CustomImageOsType \"\"\" _validation = { 'os_type': {'required':", "} _attribute_map = { 'image_name': {'key': 'imageName', 'type': 'str'}, 'sys_prep':", "sys_prep=None): super(CustomImagePropertiesCustom, self).__init__() self.image_name = image_name self.sys_prep = sys_prep self.os_type", "image_name: str :param sys_prep: Indicates whether sysprep has been run", "str or ~azure.mgmt.devtestlabs.models.CustomImageOsType \"\"\" _validation = { 'os_type': {'required': True},", "CustomImagePropertiesCustom(Model): \"\"\"Properties for creating a custom image from a VHD.", "include: 'Windows', 'Linux', 'None' :type os_type: str or ~azure.mgmt.devtestlabs.models.CustomImageOsType \"\"\"", "root for # license information. # # Code generated by", "Microsoft Corporation. All rights reserved. # Licensed under the MIT", "Licensed under the MIT License. See License.txt in the project", "on the VHD. :type sys_prep: bool :param os_type: The OS", "# Code generated by Microsoft (R) AutoRest Code Generator. #", "'sysPrep', 'type': 'bool'}, 'os_type': {'key': 'osType', 'type': 'str'}, } def", "rights reserved. # Licensed under the MIT License. See License.txt", "'str'}, } def __init__(self, os_type, image_name=None, sys_prep=None): super(CustomImagePropertiesCustom, self).__init__() self.image_name", "# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All", "str :param sys_prep: Indicates whether sysprep has been run on", "License.txt in the project root for # license information. #", "# license information. 
# # Code generated by Microsoft (R)", "(i.e. Windows, Linux). Possible values include: 'Windows', 'Linux', 'None' :type", "Code Generator. # Changes may cause incorrect behavior and will", ":param os_type: The OS type of the custom image (i.e.", "'bool'}, 'os_type': {'key': 'osType', 'type': 'str'}, } def __init__(self, os_type,", "be lost if the code is # regenerated. # --------------------------------------------------------------------------", "'Linux', 'None' :type os_type: str or ~azure.mgmt.devtestlabs.models.CustomImageOsType \"\"\" _validation =", "{'key': 'sysPrep', 'type': 'bool'}, 'os_type': {'key': 'osType', 'type': 'str'}, }", "Indicates whether sysprep has been run on the VHD. :type", "for # license information. # # Code generated by Microsoft", "} def __init__(self, os_type, image_name=None, sys_prep=None): super(CustomImagePropertiesCustom, self).__init__() self.image_name =", "if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization", "(R) AutoRest Code Generator. # Changes may cause incorrect behavior", "{'required': True}, } _attribute_map = { 'image_name': {'key': 'imageName', 'type':", "the VHD. :type sys_prep: bool :param os_type: The OS type", "values include: 'Windows', 'Linux', 'None' :type os_type: str or ~azure.mgmt.devtestlabs.models.CustomImageOsType", "Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under", "Windows, Linux). Possible values include: 'Windows', 'Linux', 'None' :type os_type:" ]
[ "[0., 0., 0.], [0., 1., 0.], [0., 1., 0.], ],", "[0., 1., 0.], ] ]) chamfer = ChamferLoss() print('chamfer loss", "and y[j,:] if y is not given then use 'y=x'.", "for 2D or 3 for 3D) output is a bx1", "of the chamfer distance for each sample of the batch", "y): chamfer = chamfer_distance(x, y) return torch.sum(chamfer) if __name__ ==", "differences, -1) return distances def chamfer_distance(x, y): ''' input x", "0.], ], [ [1., 1., 0.], [1., 2., 0.], [0.,", "torch.topk(dist_vec, k=1, dim=2, largest=False).values chamfer = torch.sum(min_distances, dim=1) / torch.tensor(x.shape[1])", "[0., 1., 0.], ] ]) y = torch.tensor([ [ [0.,", "chamfer = ChamferLoss() print('chamfer loss torch (cpu):', chamfer(x, y)) print('chamfer", "# sys.path.append(\"../distance/chamfer/\") # import dist_chamfer as cd # CD =", "3D) output is a bx1 Matrix with the value of", "0.], [1., 2., 0.], [0., 1., 0.], ] ]) y", "torch def expanded_pairwise_distances(x, y): ''' Input: x is a bxNxd", "ChamferLoss() print('chamfer loss torch (cpu):', chamfer(x, y)) print('chamfer loss torch", "[0., 1., 0.], [0., 1., 0.], ], [ [1., 1.,", "1., 0.], [0., 1., 0.], [0., 1., 0.], ], [", "between x[i,:] and y[j,:] if y is not given then", "y is not given then use 'y=x'. i.e. 
dist[i,j] =", "0.], [0., 1., 0.], ] ]) y = torch.tensor([ [", "torch.tensor(x.shape[1]) return chamfer class ChamferLoss(torch.nn.Module): def forward(self, x, y): chamfer", "distances = torch.sum(differences * differences, -1) return distances def chamfer_distance(x,", "x, y): chamfer = chamfer_distance(x, y) return torch.sum(chamfer) if __name__", "1., 0.], [1., 2., 0.], [0., 1., 0.], ] ])", "= x.unsqueeze(2) - y.unsqueeze(1) distances = torch.sum(differences * differences, -1)", "import torch def expanded_pairwise_distances(x, y): ''' Input: x is a", "k=1, dim=2, largest=False).values chamfer = torch.sum(min_distances, dim=1) / torch.tensor(x.shape[1]) return", "the square norm between x[i,:] and y[j,:] if y is", "y[j,:] if y is not given then use 'y=x'. i.e.", "with the value of the chamfer distance for each sample", "i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' differences = x.unsqueeze(2) - y.unsqueeze(1)", "norm between x[i,:] and y[j,:] if y is not given", "= expanded_pairwise_distances(x, y) min_distances = torch.topk(dist_vec, k=1, dim=2, largest=False).values chamfer", "is a bxNxd matrix y is an optional bxMxd matirx", "a bxNxM matrix where dist[i,j] is the square norm between", "# dist1, dist2, _, _= CD(x, y) # print('orig', dist1)", "are bxNxM matrix, b: batch, N:number of point, M: point", "= ||x[i,:]-y[j,:]||^2 ''' differences = x.unsqueeze(2) - y.unsqueeze(1) distances =", "x = torch.tensor([ [ [0., 0., 0.], [0., 1., 0.],", "bxNxM matrix, b: batch, N:number of point, M: point dim", "matrix, b: batch, N:number of point, M: point dim (ex.", "a bxNxd matrix y is an optional bxMxd matirx Output:", "then use 'y=x'. i.e. 
dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' differences =", "input x and y are bxNxM matrix, b: batch, N:number", "''' dist_vec = expanded_pairwise_distances(x, y) min_distances = torch.topk(dist_vec, k=1, dim=2,", "value of the chamfer distance for each sample of the", "3 for 3D) output is a bx1 Matrix with the", "ChamferLoss(torch.nn.Module): def forward(self, x, y): chamfer = chamfer_distance(x, y) return", "2., 0.], [0., 1., 0.], ] ]) y = torch.tensor([", "]) y = torch.tensor([ [ [0., 1., 0.], [0., 1.,", "if y is not given then use 'y=x'. i.e. dist[i,j]", "= torch.sum(min_distances, dim=1) / torch.tensor(x.shape[1]) return chamfer class ChamferLoss(torch.nn.Module): def", "matrix where dist[i,j] is the square norm between x[i,:] and", "and y are bxNxM matrix, b: batch, N:number of point,", "/ torch.tensor(x.shape[1]) return chamfer class ChamferLoss(torch.nn.Module): def forward(self, x, y):", "Input: x is a bxNxd matrix y is an optional", "use 'y=x'. i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' differences = x.unsqueeze(2)", "= torch.sum(differences * differences, -1) return distances def chamfer_distance(x, y):", "is a bxNxM matrix where dist[i,j] is the square norm", "sample of the batch ''' dist_vec = expanded_pairwise_distances(x, y) min_distances", "chamfer class ChamferLoss(torch.nn.Module): def forward(self, x, y): chamfer = chamfer_distance(x,", "if __name__ == \"__main__\": x = torch.tensor([ [ [0., 0.,", "of point, M: point dim (ex. 2 for 2D or", "[0., 1., 0.], ], [ [1., 1., 0.], [1., 2.,", "-1) return distances def chamfer_distance(x, y): ''' input x and", "[ [0., 1., 0.], [0., 1., 0.], [0., 1., 0.],", "x and y are bxNxM matrix, b: batch, N:number of", "[0., 1., 0.], [0., 1., 0.], [0., 1., 0.], ],", "y = torch.tensor([ [ [0., 1., 0.], [0., 1., 0.],", "(ex. 2 for 2D or 3 for 3D) output is", "dim (ex. 
2 for 2D or 3 for 3D) output", "of the batch ''' dist_vec = expanded_pairwise_distances(x, y) min_distances =", "[ [0., 0., 0.], [0., 1., 0.], [0., 1., 0.],", "def chamfer_distance(x, y): ''' input x and y are bxNxM", "Output: dist is a bxNxM matrix where dist[i,j] is the", "* differences, -1) return distances def chamfer_distance(x, y): ''' input", "chamfer = torch.sum(min_distances, dim=1) / torch.tensor(x.shape[1]) return chamfer class ChamferLoss(torch.nn.Module):", "where dist[i,j] is the square norm between x[i,:] and y[j,:]", "square norm between x[i,:] and y[j,:] if y is not", "''' input x and y are bxNxM matrix, b: batch,", "import dist_chamfer as cd # CD = cd.chamferDist() # dist1,", "batch, N:number of point, M: point dim (ex. 2 for", "= ChamferLoss() print('chamfer loss torch (cpu):', chamfer(x, y)) print('chamfer loss", "bxMxd matirx Output: dist is a bxNxM matrix where dist[i,j]", "optional bxMxd matirx Output: dist is a bxNxM matrix where", "not given then use 'y=x'. i.e. 
dist[i,j] = ||x[i,:]-y[j,:]||^2 '''", "min_distances = torch.topk(dist_vec, k=1, dim=2, largest=False).values chamfer = torch.sum(min_distances, dim=1)", "loss torch (cuda):', chamfer(x.cuda(), y.cuda())) # import sys # sys.path.append(\"../distance/chamfer/\")", "the value of the chamfer distance for each sample of", "distance for each sample of the batch ''' dist_vec =", "= torch.tensor([ [ [0., 0., 0.], [0., 1., 0.], [0.,", "0.], ] ]) y = torch.tensor([ [ [0., 1., 0.],", "distances def chamfer_distance(x, y): ''' input x and y are", "torch.sum(differences * differences, -1) return distances def chamfer_distance(x, y): '''", "[ [1., 1., 0.], [1., 2., 0.], [0., 1., 0.],", "dist is a bxNxM matrix where dist[i,j] is the square", "is an optional bxMxd matirx Output: dist is a bxNxM", "__name__ == \"__main__\": x = torch.tensor([ [ [0., 0., 0.],", "0.], [1., 2., 0.], [0., 1., 0.], ] ]) chamfer", "1., 0.], [0., 1., 0.], ], [ [1., 1., 0.],", "], [ [1., 1., 0.], [1., 2., 0.], [0., 1.,", "0., 0.], [0., 1., 0.], [0., 1., 0.], ], [", "# CD = cd.chamferDist() # dist1, dist2, _, _= CD(x,", "is the square norm between x[i,:] and y[j,:] if y", "||x[i,:]-y[j,:]||^2 ''' differences = x.unsqueeze(2) - y.unsqueeze(1) distances = torch.sum(differences", "0.], ] ]) chamfer = ChamferLoss() print('chamfer loss torch (cpu):',", "torch.sum(chamfer) if __name__ == \"__main__\": x = torch.tensor([ [ [0.,", "forward(self, x, y): chamfer = chamfer_distance(x, y) return torch.sum(chamfer) if", "bxNxM matrix where dist[i,j] is the square norm between x[i,:]", "batch ''' dist_vec = expanded_pairwise_distances(x, y) min_distances = torch.topk(dist_vec, k=1,", "class ChamferLoss(torch.nn.Module): def forward(self, x, y): chamfer = chamfer_distance(x, y)", "for 3D) output is a bx1 Matrix with the value", "sys.path.append(\"../distance/chamfer/\") # import dist_chamfer as cd # CD = cd.chamferDist()", "= torch.topk(dist_vec, k=1, dim=2, largest=False).values chamfer = torch.sum(min_distances, dim=1) 
/", "= cd.chamferDist() # dist1, dist2, _, _= CD(x, y) #", "torch.tensor([ [ [0., 0., 0.], [0., 1., 0.], [0., 1.,", "[1., 2., 0.], [0., 1., 0.], ] ]) chamfer =", "0.], [0., 1., 0.], ] ]) chamfer = ChamferLoss() print('chamfer", "import sys # sys.path.append(\"../distance/chamfer/\") # import dist_chamfer as cd #", "matirx Output: dist is a bxNxM matrix where dist[i,j] is", "dist[i,j] is the square norm between x[i,:] and y[j,:] if", "y is an optional bxMxd matirx Output: dist is a", "def forward(self, x, y): chamfer = chamfer_distance(x, y) return torch.sum(chamfer)", "is not given then use 'y=x'. i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2", "expanded_pairwise_distances(x, y) min_distances = torch.topk(dist_vec, k=1, dim=2, largest=False).values chamfer =", "dim=2, largest=False).values chamfer = torch.sum(min_distances, dim=1) / torch.tensor(x.shape[1]) return chamfer", "2D or 3 for 3D) output is a bx1 Matrix", "chamfer(x, y)) print('chamfer loss torch (cuda):', chamfer(x.cuda(), y.cuda())) # import", "dist_chamfer as cd # CD = cd.chamferDist() # dist1, dist2,", "each sample of the batch ''' dist_vec = expanded_pairwise_distances(x, y)", "given then use 'y=x'. i.e. 
dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' differences", "Matrix with the value of the chamfer distance for each", "- y.unsqueeze(1) distances = torch.sum(differences * differences, -1) return distances", "return distances def chamfer_distance(x, y): ''' input x and y", "0.], [0., 1., 0.], [0., 1., 0.], ], [ [1.,", "\"__main__\": x = torch.tensor([ [ [0., 0., 0.], [0., 1.,", "y.cuda())) # import sys # sys.path.append(\"../distance/chamfer/\") # import dist_chamfer as", "torch.tensor([ [ [0., 1., 0.], [0., 1., 0.], [0., 1.,", "0.], [0., 1., 0.], ], [ [1., 1., 0.], [1.,", "]) chamfer = ChamferLoss() print('chamfer loss torch (cpu):', chamfer(x, y))", "dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' differences = x.unsqueeze(2) - y.unsqueeze(1) distances", "is a bx1 Matrix with the value of the chamfer", "== \"__main__\": x = torch.tensor([ [ [0., 0., 0.], [0.,", "cd # CD = cd.chamferDist() # dist1, dist2, _, _=", "[1., 2., 0.], [0., 1., 0.], ] ]) y =", "loss torch (cpu):', chamfer(x, y)) print('chamfer loss torch (cuda):', chamfer(x.cuda(),", "b: batch, N:number of point, M: point dim (ex. 
2", "matrix y is an optional bxMxd matirx Output: dist is", "y) return torch.sum(chamfer) if __name__ == \"__main__\": x = torch.tensor([", "y): ''' Input: x is a bxNxd matrix y is", "y): ''' input x and y are bxNxM matrix, b:", "torch (cuda):', chamfer(x.cuda(), y.cuda())) # import sys # sys.path.append(\"../distance/chamfer/\") #", "# import dist_chamfer as cd # CD = cd.chamferDist() #", "''' differences = x.unsqueeze(2) - y.unsqueeze(1) distances = torch.sum(differences *", "print('chamfer loss torch (cpu):', chamfer(x, y)) print('chamfer loss torch (cuda):',", "y are bxNxM matrix, b: batch, N:number of point, M:", "y)) print('chamfer loss torch (cuda):', chamfer(x.cuda(), y.cuda())) # import sys", "dim=1) / torch.tensor(x.shape[1]) return chamfer class ChamferLoss(torch.nn.Module): def forward(self, x,", "sys # sys.path.append(\"../distance/chamfer/\") # import dist_chamfer as cd # CD", "N:number of point, M: point dim (ex. 2 for 2D", "chamfer_distance(x, y) return torch.sum(chamfer) if __name__ == \"__main__\": x =", "for each sample of the batch ''' dist_vec = expanded_pairwise_distances(x,", "M: point dim (ex. 
2 for 2D or 3 for", "print('chamfer loss torch (cuda):', chamfer(x.cuda(), y.cuda())) # import sys #", "torch (cpu):', chamfer(x, y)) print('chamfer loss torch (cuda):', chamfer(x.cuda(), y.cuda()))", "2 for 2D or 3 for 3D) output is a", "chamfer_distance(x, y): ''' input x and y are bxNxM matrix,", "= chamfer_distance(x, y) return torch.sum(chamfer) if __name__ == \"__main__\": x", "CD = cd.chamferDist() # dist1, dist2, _, _= CD(x, y)", "x is a bxNxd matrix y is an optional bxMxd", "chamfer distance for each sample of the batch ''' dist_vec", "= torch.tensor([ [ [0., 1., 0.], [0., 1., 0.], [0.,", "y) min_distances = torch.topk(dist_vec, k=1, dim=2, largest=False).values chamfer = torch.sum(min_distances,", "expanded_pairwise_distances(x, y): ''' Input: x is a bxNxd matrix y", "x[i,:] and y[j,:] if y is not given then use", "as cd # CD = cd.chamferDist() # dist1, dist2, _,", "x.unsqueeze(2) - y.unsqueeze(1) distances = torch.sum(differences * differences, -1) return", "] ]) y = torch.tensor([ [ [0., 1., 0.], [0.,", "a bx1 Matrix with the value of the chamfer distance", "(cuda):', chamfer(x.cuda(), y.cuda())) # import sys # sys.path.append(\"../distance/chamfer/\") # import", "# import sys # sys.path.append(\"../distance/chamfer/\") # import dist_chamfer as cd", "output is a bx1 Matrix with the value of the", "bx1 Matrix with the value of the chamfer distance for", "largest=False).values chamfer = torch.sum(min_distances, dim=1) / torch.tensor(x.shape[1]) return chamfer class", "] ]) chamfer = ChamferLoss() print('chamfer loss torch (cpu):', chamfer(x,", "the chamfer distance for each sample of the batch '''", "'y=x'. i.e. 
dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' differences = x.unsqueeze(2) -", "2., 0.], [0., 1., 0.], ] ]) chamfer = ChamferLoss()", "1., 0.], ], [ [1., 1., 0.], [1., 2., 0.],", "1., 0.], ] ]) y = torch.tensor([ [ [0., 1.,", "''' Input: x is a bxNxd matrix y is an", "y.unsqueeze(1) distances = torch.sum(differences * differences, -1) return distances def", "bxNxd matrix y is an optional bxMxd matirx Output: dist", "(cpu):', chamfer(x, y)) print('chamfer loss torch (cuda):', chamfer(x.cuda(), y.cuda())) #", "differences = x.unsqueeze(2) - y.unsqueeze(1) distances = torch.sum(differences * differences,", "or 3 for 3D) output is a bx1 Matrix with", "return torch.sum(chamfer) if __name__ == \"__main__\": x = torch.tensor([ [", "torch.sum(min_distances, dim=1) / torch.tensor(x.shape[1]) return chamfer class ChamferLoss(torch.nn.Module): def forward(self,", "the batch ''' dist_vec = expanded_pairwise_distances(x, y) min_distances = torch.topk(dist_vec,", "point, M: point dim (ex. 2 for 2D or 3", "def expanded_pairwise_distances(x, y): ''' Input: x is a bxNxd matrix", "return chamfer class ChamferLoss(torch.nn.Module): def forward(self, x, y): chamfer =", "[1., 1., 0.], [1., 2., 0.], [0., 1., 0.], ]", "chamfer(x.cuda(), y.cuda())) # import sys # sys.path.append(\"../distance/chamfer/\") # import dist_chamfer", "cd.chamferDist() # dist1, dist2, _, _= CD(x, y) # print('orig',", "dist_vec = expanded_pairwise_distances(x, y) min_distances = torch.topk(dist_vec, k=1, dim=2, largest=False).values", "chamfer = chamfer_distance(x, y) return torch.sum(chamfer) if __name__ == \"__main__\":", "1., 0.], ] ]) chamfer = ChamferLoss() print('chamfer loss torch", "point dim (ex. 2 for 2D or 3 for 3D)", "an optional bxMxd matirx Output: dist is a bxNxM matrix" ]
[ "def create_acrelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id) search_key = request.GET.get('search',", "return HttpResponseRedirect(target) context.update({ 'form': form, }) template = 'curation/authority_aarelation_changeview.html' return", "'authorities', 'instance': authority, 'search_key': search_key, 'current_index': current_index } if request.method", "pk=acrelation_id) search_key = request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current')) context", "return render(request, template, context) @user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit',", "template = 'curation/authority_aarelation_changeview.html' return render(request, template, context) @user_passes_test(lambda u: u.is_superuser", "prefix='acrelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=acrelations'", "aarelation=AARelation() aarelation.subject = authority type_controlled = request.GET.get('type_controlled', None) if type_controlled:", "args=(authority.id,)) + '?tab=acrelations' if search_key and current_index: target += '&search=%s&current=%s'", "acrelation, 'search_key': search_key, 'current_index': current_index } if request.method == 'GET':", "if type_controlled: aarelation = dict(AARelation.TYPE_CHOICES)[type_controlled] form = AARelationForm(prefix='aarelation', instance=aarelation) elif", "if request.method == 'GET': form = AARelationForm(instance=aarelation, prefix='aarelation') elif request.method", "import permission_required, objectgetter from isisdata.models import * from isisdata.utils import", "ACRelationForm(instance=acrelation, prefix='acrelation') elif request.method == 'POST': form = ACRelationForm(request.POST, instance=acrelation,", "instance=acrelation, prefix='acrelation') if form.is_valid(): form.save() target = 
reverse('curation:curate_authority', args=(authority.id,)) +", "search_key, 'current_index': current_index } if request.method == 'GET': initial =", "== 'GET': initial = { 'authority': authority.id, 'name_for_display_in_citation': authority.name }", "@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def acrelation_for_authority(request, authority_id, acrelation_id): authority = get_object_or_404(Authority,", "reverse from django.contrib.admin.views.decorators import staff_member_required, user_passes_test from rules.contrib.views import permission_required,", "= get_object_or_404(Authority, pk=authority_id) aarelation = get_object_or_404(AARelation, pk=aarelation_id) search_key = request.GET.get('search',", "== 'GET': initial = { 'subject': authority.id } aarelation=AARelation() aarelation.subject", "} type_controlled = request.GET.get('type_controlled', None) if type_controlled: initial.update({'type_controlled': type_controlled.upper()}) form", "isisdata import operations from isisdata.filters import * from isisdata import", "fn=objectgetter(Authority, 'authority_id')) def delete_aarelation_for_authority(request, authority_id, aarelation_id, format=None): authority = get_object_or_404(Authority,", "current_index } if request.method == 'GET': form = AARelationForm(instance=aarelation, prefix='aarelation')", "if search_key and current_index: target += '&search=%s&current=%s' % (search_key, current_index)", "#, HttpResponseForbidden, Http404, , JsonResponse from django.shortcuts import get_object_or_404, render,", "'GET': initial = { 'authority': authority.id, 'name_for_display_in_citation': authority.name } type_controlled", "+ '?tab=aarelations' if search_key and current_index: target += '&search=%s&current=%s' %", "'curation_subsection': 'authorities', 'instance': authority, 'acrelation': acrelation, 'search_key': search_key, 'current_index': current_index", "isisdata import tasks as data_tasks from curation import p3_port_utils 
from", "current_index: target += '&search=%s&current=%s' % (search_key, current_index) return HttpResponseRedirect(target) context.update({", "from django.contrib.admin.views.decorators import staff_member_required, user_passes_test from rules.contrib.views import permission_required, objectgetter", "JsonResponse({'result': True}) target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations' if search_key", "fn=objectgetter(Authority, 'authority_id')) def create_aarelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id) search_key", "'curation/authority_aarelation_changeview.html' return render(request, template, context) @user_passes_test(lambda u: u.is_superuser or u.is_staff)", "elif request.method == 'POST': form = AARelationForm(request.POST, instance=aarelation, prefix='aarelation') if", "u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_acrelation_for_authority(request, authority_id): authority = get_object_or_404(Authority,", "'name_for_display_in_citation': authority.name } type_controlled = request.GET.get('type_controlled', None) if type_controlled: initial.update({'type_controlled':", "current_index: target += '&search=%s&current=%s' % (search_key, current_index) return HttpResponseRedirect(target) if", "current_index) return HttpResponseRedirect(target) context.update({ 'form': form, }) template = 'curation/authority_acrelation_changeview.html'", "form, }) template = 'curation/authority_aarelation_changeview.html' return render(request, template, context) @user_passes_test(lambda", "import print_function from __future__ import division from __future__ import unicode_literals", "create_acrelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id) search_key = request.GET.get('search', request.POST.get('search'))", "= ACRelationForm(request.POST, prefix='acrelation') if 
form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,))", "target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations' if search_key and current_index:", "= AARelationForm(request.POST, prefix='aarelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,))", "= get_object_or_404(Authority, pk=authority_id) acrelation = get_object_or_404(ACRelation, pk=acrelation_id) search_key = request.GET.get('search',", "fn=objectgetter(Authority, 'authority_id')) def acrelation_for_authority(request, authority_id, acrelation_id): authority = get_object_or_404(Authority, pk=authority_id)", "HttpResponseRedirect, JsonResponse, QueryDict #, HttpResponseForbidden, Http404, , JsonResponse from django.shortcuts", "import division from __future__ import unicode_literals from django.http import HttpResponse,", "from curation.forms import * from curation.contrib.views import check_rules @user_passes_test(lambda u:", "'authorities', 'instance': authority, 'acrelation': acrelation, 'search_key': search_key, 'current_index': current_index }", "import HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict #, HttpResponseForbidden, Http404, , JsonResponse", "request.method == 'GET': form = AARelationForm(instance=aarelation, prefix='aarelation') elif request.method ==", "pk=authority_id) aarelation = get_object_or_404(AARelation, pk=aarelation_id) search_key = request.GET.get('search', request.POST.get('search')) current_index", "from __future__ import unicode_literals from django.http import HttpResponse, HttpResponseRedirect, JsonResponse,", "'authority': authority.id, 'name_for_display_in_citation': authority.name } type_controlled = request.GET.get('type_controlled', None) if", "form = AARelationForm(request.POST, instance=aarelation, prefix='aarelation') if form.is_valid(): form.save() target =", "search_key, 'current_index': current_index } if 
request.POST.get('confirm', False) == 'true': if", "datetime.datetime.now() aarelation.delete() if format == 'json': return JsonResponse({'result': True}) target", "request.method == 'GET': initial = { 'authority': authority.id, 'name_for_display_in_citation': authority.name", "@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_acrelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id)", "authority.id, 'name_for_display_in_citation': authority.name } type_controlled = request.GET.get('type_controlled', None) if type_controlled:", "authority = get_object_or_404(Authority, pk=authority_id) aarelation = get_object_or_404(AARelation, pk=aarelation_id) search_key =", "type_controlled = request.GET.get('type_controlled', None) if type_controlled: aarelation = dict(AARelation.TYPE_CHOICES)[type_controlled] form", "{ 'subject': authority.id } aarelation=AARelation() aarelation.subject = authority type_controlled =", "if request.method == 'GET': initial = { 'subject': authority.id }", "= authority type_controlled = request.GET.get('type_controlled', None) if type_controlled: aarelation =", "= dict(AARelation.TYPE_CHOICES)[type_controlled] form = AARelationForm(prefix='aarelation', instance=aarelation) elif request.method == 'POST':", "ACRelationForm(request.POST, prefix='acrelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,)) +", "instance=aarelation, prefix='aarelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,)) +", "request.method == 'POST': form = AARelationForm(request.POST, instance=aarelation, prefix='aarelation') if form.is_valid():", "'POST': form = AARelationForm(request.POST, prefix='aarelation') if form.is_valid(): form.save() target =", "'authority_id')) def delete_aarelation_for_authority(request, authority_id, aarelation_id, format=None): authority = 
get_object_or_404(Authority, pk=authority_id)", "if request.POST.get('confirm', False) == 'true': if not aarelation.modified_on: aarelation.modified_on =", "'current_index': current_index } if request.method == 'GET': form = AARelationForm(instance=aarelation,", "or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_acrelation_for_authority(request, authority_id): authority =", "authority type_controlled = request.GET.get('type_controlled', None) if type_controlled: aarelation = dict(AARelation.TYPE_CHOICES)[type_controlled]", "authority, 'aarelation': aarelation, 'search_key': search_key, 'current_index': current_index } if request.POST.get('confirm',", "None) if type_controlled: initial.update({'type_controlled': type_controlled.upper()}) form = ACRelationForm(prefix='acrelation', initial=initial) elif", "} if request.method == 'GET': initial = { 'authority': authority.id,", "+ '?tab=acrelations' if search_key and current_index: target += '&search=%s&current=%s' %", "import check_rules @user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id'))", "authority = get_object_or_404(Authority, pk=authority_id) search_key = request.GET.get('search', request.POST.get('search')) current_index =", "context = { 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'search_key':", "__future__ import absolute_import from __future__ import print_function from __future__ import", "== 'POST': form = ACRelationForm(request.POST, instance=acrelation, prefix='acrelation') if form.is_valid(): form.save()", "aarelation_id, format=None): authority = get_object_or_404(Authority, pk=authority_id) aarelation = get_object_or_404(AARelation, pk=aarelation_id)", "get_object_or_404(Authority, pk=authority_id) aarelation = get_object_or_404(AARelation, pk=aarelation_id) search_key = request.GET.get('search', 
request.POST.get('search'))", "== 'POST': form = AARelationForm(request.POST, prefix='aarelation') if form.is_valid(): form.save() target", "'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'aarelation': aarelation, 'search_key': search_key, 'current_index':", "def acrelation_for_authority(request, authority_id, acrelation_id): authority = get_object_or_404(Authority, pk=authority_id) acrelation =", "HttpResponseRedirect(target) if format == 'json': return JsonResponse({'result': False}) template =", "form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=acrelations' if search_key", "u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def acrelation_for_authority(request, authority_id,", "} if request.method == 'GET': form = AARelationForm(instance=aarelation, prefix='aarelation') elif", "search_key = request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current')) context =", "'curation/authority_acrelation_changeview.html' return render(request, template, context) @user_passes_test(lambda u: u.is_superuser or u.is_staff)", "form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations' if search_key", "'?tab=aarelations' if search_key and current_index: target += '&search=%s&current=%s' % (search_key,", "get_object_or_404(ACRelation, pk=acrelation_id) search_key = request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current'))", "'search_key': search_key, 'current_index': current_index } if request.POST.get('confirm', False) == 'true':", "context) @user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def", "current_index) return HttpResponseRedirect(target) if format == 
'json': return JsonResponse({'result': False})", "division from __future__ import unicode_literals from django.http import HttpResponse, HttpResponseRedirect,", "reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations' if search_key and current_index: target +=", "current_index } if request.POST.get('confirm', False) == 'true': if not aarelation.modified_on:", "* from isisdata.utils import strip_punctuation, normalize from isisdata import operations", "pk=authority_id) search_key = request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current')) context", "request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current')) context = { 'curation_section': 'datasets',", "= AARelationForm(request.POST, instance=aarelation, prefix='aarelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority',", "from __future__ import print_function from __future__ import division from __future__", "AARelationForm(request.POST, instance=aarelation, prefix='aarelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,))", "aarelation, 'search_key': search_key, 'current_index': current_index } if request.POST.get('confirm', False) ==", "'json': return JsonResponse({'result': True}) target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations'", "u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def acrelation_for_authority(request, authority_id, acrelation_id): authority =", "if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=acrelations' if", "= request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current')) context = {", "= request.GET.get('current', request.POST.get('current')) context = { 'curation_section': 
'datasets', 'curation_subsection': 'authorities',", "authority.name } type_controlled = request.GET.get('type_controlled', None) if type_controlled: initial.update({'type_controlled': type_controlled.upper()})", "elif request.method == 'POST': form = ACRelationForm(request.POST, instance=acrelation, prefix='acrelation') if", "if request.method == 'GET': initial = { 'authority': authority.id, 'name_for_display_in_citation':", "'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'search_key': search_key, 'current_index': current_index", "AARelationForm(request.POST, prefix='aarelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,)) +", "'authority_id')) def acrelation_for_authority(request, authority_id, acrelation_id): authority = get_object_or_404(Authority, pk=authority_id) acrelation", "JsonResponse from django.shortcuts import get_object_or_404, render, redirect from django.urls import", "from isisdata import tasks as data_tasks from curation import p3_port_utils", "return JsonResponse({'result': False}) template = 'curation/authority_aarelation_delete.html' return render(request, template, context)", "import unicode_literals from django.http import HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict #,", "form = ACRelationForm(prefix='acrelation', initial=initial) elif request.method == 'POST': form =", "from isisdata.filters import * from isisdata import tasks as data_tasks", "aarelation = dict(AARelation.TYPE_CHOICES)[type_controlled] form = AARelationForm(prefix='aarelation', instance=aarelation) elif request.method ==", "form, }) template = 'curation/authority_acrelation_changeview.html' return render(request, template, context) @user_passes_test(lambda", "current_index } if request.method == 'GET': form = ACRelationForm(instance=acrelation, prefix='acrelation')", "Http404, , JsonResponse from django.shortcuts import get_object_or_404, render, redirect 
from", "'instance': authority, 'acrelation': acrelation, 'search_key': search_key, 'current_index': current_index } if", "} if request.method == 'GET': initial = { 'subject': authority.id", "+= '&search=%s&current=%s' % (search_key, current_index) return HttpResponseRedirect(target) context.update({ 'form': form,", "} if request.POST.get('confirm', False) == 'true': if not aarelation.modified_on: aarelation.modified_on", "prefix='aarelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations'", "'search_key': search_key, 'current_index': current_index } if request.method == 'GET': initial", "HttpResponseForbidden, Http404, , JsonResponse from django.shortcuts import get_object_or_404, render, redirect", "type_controlled: initial.update({'type_controlled': type_controlled.upper()}) form = ACRelationForm(prefix='acrelation', initial=initial) elif request.method ==", "'aarelation': aarelation, 'search_key': search_key, 'current_index': current_index } if request.method ==", "AARelationForm(instance=aarelation, prefix='aarelation') elif request.method == 'POST': form = AARelationForm(request.POST, instance=aarelation,", "= ACRelationForm(instance=acrelation, prefix='acrelation') elif request.method == 'POST': form = ACRelationForm(request.POST,", "return HttpResponseRedirect(target) if format == 'json': return JsonResponse({'result': False}) template", "staff_member_required, user_passes_test from rules.contrib.views import permission_required, objectgetter from isisdata.models import", "aarelation.delete() if format == 'json': return JsonResponse({'result': True}) target =", "from isisdata.utils import strip_punctuation, normalize from isisdata import operations from", "== 'POST': form = AARelationForm(request.POST, instance=aarelation, prefix='aarelation') if form.is_valid(): form.save()", "% (search_key, current_index) return HttpResponseRedirect(target) context.update({ 'form': form, }) 
template", "p3_port_utils from curation.forms import * from curation.contrib.views import check_rules @user_passes_test(lambda", "= { 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'aarelation': aarelation,", "authority, 'aarelation': aarelation, 'search_key': search_key, 'current_index': current_index } if request.method", "JsonResponse, QueryDict #, HttpResponseForbidden, Http404, , JsonResponse from django.shortcuts import", "from curation import p3_port_utils from curation.forms import * from curation.contrib.views", "import reverse from django.contrib.admin.views.decorators import staff_member_required, user_passes_test from rules.contrib.views import", "context.update({ 'form': form, }) template = 'curation/authority_acrelation_changeview.html' return render(request, template,", "__future__ import print_function from __future__ import division from __future__ import", "not aarelation.modified_on: aarelation.modified_on = datetime.datetime.now() aarelation.delete() if format == 'json':", "}) template = 'curation/authority_aarelation_changeview.html' return render(request, template, context) @user_passes_test(lambda u:", "as data_tasks from curation import p3_port_utils from curation.forms import *", "acrelation_id): authority = get_object_or_404(Authority, pk=authority_id) acrelation = get_object_or_404(ACRelation, pk=acrelation_id) search_key", "{ 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'aarelation': aarelation, 'search_key':", "{ 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'search_key': search_key, 'current_index':", "@user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def acrelation_for_authority(request,", "target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=acrelations' if search_key and 
current_index:", "from curation.contrib.views import check_rules @user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit',", "authority_id, acrelation_id): authority = get_object_or_404(Authority, pk=authority_id) acrelation = get_object_or_404(ACRelation, pk=acrelation_id)", "strip_punctuation, normalize from isisdata import operations from isisdata.filters import *", "authority, 'search_key': search_key, 'current_index': current_index } if request.method == 'GET':", "isisdata.models import * from isisdata.utils import strip_punctuation, normalize from isisdata", "if format == 'json': return JsonResponse({'result': False}) template = 'curation/authority_aarelation_delete.html'", "form.save() target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations' if search_key and", "'json': return JsonResponse({'result': False}) template = 'curation/authority_aarelation_delete.html' return render(request, template,", "def create_aarelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id) search_key = request.GET.get('search',", "current_index) return HttpResponseRedirect(target) context.update({ 'form': form, }) template = 'curation/authority_aarelation_changeview.html'", "from rules.contrib.views import permission_required, objectgetter from isisdata.models import * from", "form = AARelationForm(prefix='aarelation', instance=aarelation) elif request.method == 'POST': form =", "type_controlled.upper()}) form = ACRelationForm(prefix='acrelation', initial=initial) elif request.method == 'POST': form", "import * from curation.contrib.views import check_rules @user_passes_test(lambda u: u.is_superuser or", "request.method == 'GET': form = ACRelationForm(instance=acrelation, prefix='acrelation') elif request.method ==", "authority_id): authority = get_object_or_404(Authority, pk=authority_id) search_key = request.GET.get('search', request.POST.get('search')) 
current_index", "'search_key': search_key, 'current_index': current_index } if request.method == 'GET': form", "request.method == 'POST': form = ACRelationForm(request.POST, prefix='acrelation') if form.is_valid(): form.save()", "'GET': form = AARelationForm(instance=aarelation, prefix='aarelation') elif request.method == 'POST': form", "QueryDict #, HttpResponseForbidden, Http404, , JsonResponse from django.shortcuts import get_object_or_404,", "== 'POST': form = ACRelationForm(request.POST, prefix='acrelation') if form.is_valid(): form.save() target", "ACRelationForm(request.POST, instance=acrelation, prefix='acrelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,))", "@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_aarelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id)", "prefix='acrelation') elif request.method == 'POST': form = ACRelationForm(request.POST, instance=acrelation, prefix='acrelation')", "data_tasks from curation import p3_port_utils from curation.forms import * from", "= AARelationForm(instance=aarelation, prefix='aarelation') elif request.method == 'POST': form = AARelationForm(request.POST,", "= ACRelationForm(request.POST, instance=acrelation, prefix='acrelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority',", "}) template = 'curation/authority_acrelation_changeview.html' return render(request, template, context) @user_passes_test(lambda u:", "args=(authority.id,)) + '?tab=aarelations' if search_key and current_index: target += '&search=%s&current=%s'", "target += '&search=%s&current=%s' % (search_key, current_index) return HttpResponseRedirect(target) if format", "+= '&search=%s&current=%s' % (search_key, current_index) return HttpResponseRedirect(target) if format ==", "AARelationForm(prefix='aarelation', instance=aarelation) elif request.method == 'POST': form 
= AARelationForm(request.POST, prefix='aarelation')", "== 'GET': form = ACRelationForm(instance=acrelation, prefix='acrelation') elif request.method == 'POST':", "@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def aarelation_for_authority(request, authority_id, aarelation_id): authority = get_object_or_404(Authority,", "request.method == 'GET': initial = { 'subject': authority.id } aarelation=AARelation()", "import get_object_or_404, render, redirect from django.urls import reverse from django.contrib.admin.views.decorators", "initial.update({'type_controlled': type_controlled.upper()}) form = ACRelationForm(prefix='acrelation', initial=initial) elif request.method == 'POST':", "import p3_port_utils from curation.forms import * from curation.contrib.views import check_rules", "'instance': authority, 'aarelation': aarelation, 'search_key': search_key, 'current_index': current_index } if", "request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current')) context = { 'curation_section':", "template, context) @user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id'))", "'authorities', 'instance': authority, 'aarelation': aarelation, 'search_key': search_key, 'current_index': current_index }", "'authority_id')) def aarelation_for_authority(request, authority_id, aarelation_id): authority = get_object_or_404(Authority, pk=authority_id) aarelation", "return JsonResponse({'result': True}) target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations' if", "request.POST.get('current')) context = { 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority,", "or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def delete_aarelation_for_authority(request, authority_id, aarelation_id, format=None):", "= 
{ 'authority': authority.id, 'name_for_display_in_citation': authority.name } type_controlled = request.GET.get('type_controlled',", "= { 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'search_key': search_key,", "if format == 'json': return JsonResponse({'result': True}) target = reverse('curation:curate_authority',", "get_object_or_404, render, redirect from django.urls import reverse from django.contrib.admin.views.decorators import", "type_controlled: aarelation = dict(AARelation.TYPE_CHOICES)[type_controlled] form = AARelationForm(prefix='aarelation', instance=aarelation) elif request.method", "def delete_aarelation_for_authority(request, authority_id, aarelation_id, format=None): authority = get_object_or_404(Authority, pk=authority_id) aarelation", "@user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_aarelation_for_authority(request,", "curation import p3_port_utils from curation.forms import * from curation.contrib.views import", "'form': form, }) template = 'curation/authority_acrelation_changeview.html' return render(request, template, context)", "or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_aarelation_for_authority(request, authority_id): authority =", "'curation_subsection': 'authorities', 'instance': authority, 'aarelation': aarelation, 'search_key': search_key, 'current_index': current_index", "import * from isisdata import tasks as data_tasks from curation", "acrelation_for_authority(request, authority_id, acrelation_id): authority = get_object_or_404(Authority, pk=authority_id) acrelation = get_object_or_404(ACRelation,", "aarelation, 'search_key': search_key, 'current_index': current_index } if request.method == 'GET':", "initial = { 'subject': authority.id } aarelation=AARelation() aarelation.subject = authority", "__future__ import division 
from __future__ import unicode_literals from django.http import", "u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def acrelation_for_authority(request, authority_id, acrelation_id):", "form = ACRelationForm(request.POST, instance=acrelation, prefix='acrelation') if form.is_valid(): form.save() target =", "import operations from isisdata.filters import * from isisdata import tasks", "if form.is_valid(): form.save() target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations' if", "unicode_literals from django.http import HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict #, HttpResponseForbidden,", "print_function from __future__ import division from __future__ import unicode_literals from", "isisdata.utils import strip_punctuation, normalize from isisdata import operations from isisdata.filters", "aarelation.modified_on = datetime.datetime.now() aarelation.delete() if format == 'json': return JsonResponse({'result':", "'POST': form = ACRelationForm(request.POST, instance=acrelation, prefix='acrelation') if form.is_valid(): form.save() target", "current_index } if request.method == 'GET': initial = { 'subject':", "aarelation.subject = authority type_controlled = request.GET.get('type_controlled', None) if type_controlled: aarelation", "'aarelation': aarelation, 'search_key': search_key, 'current_index': current_index } if request.POST.get('confirm', False)", "form = ACRelationForm(instance=acrelation, prefix='acrelation') elif request.method == 'POST': form =", "'GET': form = ACRelationForm(instance=acrelation, prefix='acrelation') elif request.method == 'POST': form", "== 'true': if not aarelation.modified_on: aarelation.modified_on = datetime.datetime.now() aarelation.delete() if", "from __future__ import absolute_import from __future__ import print_function from __future__", "authority, 'acrelation': acrelation, 'search_key': search_key, 'current_index': 
current_index } if request.method", "@user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def delete_aarelation_for_authority(request,", "django.urls import reverse from django.contrib.admin.views.decorators import staff_member_required, user_passes_test from rules.contrib.views", "u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_aarelation_for_authority(request, authority_id): authority = get_object_or_404(Authority,", "HttpResponseRedirect(target) context.update({ 'form': form, }) template = 'curation/authority_acrelation_changeview.html' return render(request,", "if type_controlled: initial.update({'type_controlled': type_controlled.upper()}) form = ACRelationForm(prefix='acrelation', initial=initial) elif request.method", "'authority_id')) def create_aarelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id) search_key =", "isisdata.filters import * from isisdata import tasks as data_tasks from", "'current_index': current_index } if request.method == 'GET': initial = {", "render(request, template, context) @user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority,", "HttpResponseRedirect(target) context.update({ 'form': form, }) template = 'curation/authority_aarelation_changeview.html' return render(request,", "u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_acrelation_for_authority(request, authority_id): authority", "'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'acrelation': acrelation, 'search_key': search_key, 'current_index':", "rules.contrib.views import permission_required, objectgetter from isisdata.models import * from isisdata.utils", "u.is_staff) @check_rules('can_access_view_edit', 
fn=objectgetter(Authority, 'authority_id')) def aarelation_for_authority(request, authority_id, aarelation_id): authority =", "= 'curation/authority_aarelation_changeview.html' return render(request, template, context) @user_passes_test(lambda u: u.is_superuser or", "'GET': initial = { 'subject': authority.id } aarelation=AARelation() aarelation.subject =", "template = 'curation/authority_acrelation_changeview.html' return render(request, template, context) @user_passes_test(lambda u: u.is_superuser", "user_passes_test from rules.contrib.views import permission_required, objectgetter from isisdata.models import *", "* from curation.contrib.views import check_rules @user_passes_test(lambda u: u.is_superuser or u.is_staff)", "absolute_import from __future__ import print_function from __future__ import division from", "'&search=%s&current=%s' % (search_key, current_index) return HttpResponseRedirect(target) context.update({ 'form': form, })", "type_controlled = request.GET.get('type_controlled', None) if type_controlled: initial.update({'type_controlled': type_controlled.upper()}) form =", "'current_index': current_index } if request.method == 'GET': form = ACRelationForm(instance=acrelation,", "u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_acrelation_for_authority(request, authority_id):", "None) if type_controlled: aarelation = dict(AARelation.TYPE_CHOICES)[type_controlled] form = AARelationForm(prefix='aarelation', instance=aarelation)", "request.method == 'POST': form = ACRelationForm(request.POST, instance=acrelation, prefix='acrelation') if form.is_valid():", ", JsonResponse from django.shortcuts import get_object_or_404, render, redirect from django.urls", "import absolute_import from __future__ import print_function from __future__ import division", "import * from isisdata.utils import strip_punctuation, normalize from isisdata import", 
"dict(AARelation.TYPE_CHOICES)[type_controlled] form = AARelationForm(prefix='aarelation', instance=aarelation) elif request.method == 'POST': form", "False) == 'true': if not aarelation.modified_on: aarelation.modified_on = datetime.datetime.now() aarelation.delete()", "'POST': form = AARelationForm(request.POST, instance=aarelation, prefix='aarelation') if form.is_valid(): form.save() target", "if not aarelation.modified_on: aarelation.modified_on = datetime.datetime.now() aarelation.delete() if format ==", "objectgetter from isisdata.models import * from isisdata.utils import strip_punctuation, normalize", "pk=aarelation_id) search_key = request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current')) context", "def aarelation_for_authority(request, authority_id, aarelation_id): authority = get_object_or_404(Authority, pk=authority_id) aarelation =", "== 'json': return JsonResponse({'result': True}) target = reverse('curation:curate_authority', args=(authority.id,)) +", "= 'curation/authority_acrelation_changeview.html' return render(request, template, context) @user_passes_test(lambda u: u.is_superuser or", "@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def delete_aarelation_for_authority(request, authority_id, aarelation_id, format=None): authority =", "== 'GET': form = AARelationForm(instance=aarelation, prefix='aarelation') elif request.method == 'POST':", "= reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations' if search_key and current_index: target", "current_index = request.GET.get('current', request.POST.get('current')) context = { 'curation_section': 'datasets', 'curation_subsection':", "context.update({ 'form': form, }) template = 'curation/authority_aarelation_changeview.html' return render(request, template,", "'true': if not aarelation.modified_on: aarelation.modified_on = datetime.datetime.now() aarelation.delete() if format", 
"or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def acrelation_for_authority(request, authority_id, acrelation_id): authority", "authority_id, aarelation_id, format=None): authority = get_object_or_404(Authority, pk=authority_id) aarelation = get_object_or_404(AARelation,", "(search_key, current_index) return HttpResponseRedirect(target) if format == 'json': return JsonResponse({'result':", "reverse('curation:curate_authority', args=(authority.id,)) + '?tab=acrelations' if search_key and current_index: target +=", "form = AARelationForm(request.POST, prefix='aarelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority',", "= { 'subject': authority.id } aarelation=AARelation() aarelation.subject = authority type_controlled", "delete_aarelation_for_authority(request, authority_id, aarelation_id, format=None): authority = get_object_or_404(Authority, pk=authority_id) aarelation =", "from django.urls import reverse from django.contrib.admin.views.decorators import staff_member_required, user_passes_test from", "from __future__ import division from __future__ import unicode_literals from django.http", "'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'acrelation': acrelation, 'search_key': search_key,", "from django.shortcuts import get_object_or_404, render, redirect from django.urls import reverse", "fn=objectgetter(Authority, 'authority_id')) def create_acrelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id) search_key", "import staff_member_required, user_passes_test from rules.contrib.views import permission_required, objectgetter from isisdata.models", "elif request.method == 'POST': form = AARelationForm(request.POST, prefix='aarelation') if form.is_valid():", "(search_key, current_index) return HttpResponseRedirect(target) context.update({ 'form': form, }) template =", "pk=authority_id) 
acrelation = get_object_or_404(ACRelation, pk=acrelation_id) search_key = request.GET.get('search', request.POST.get('search')) current_index", "'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'aarelation': aarelation, 'search_key': search_key,", "target += '&search=%s&current=%s' % (search_key, current_index) return HttpResponseRedirect(target) context.update({ 'form':", "'form': form, }) template = 'curation/authority_aarelation_changeview.html' return render(request, template, context)", "or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def aarelation_for_authority(request, authority_id, aarelation_id): authority", "get_object_or_404(Authority, pk=authority_id) search_key = request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current'))", "'instance': authority, 'search_key': search_key, 'current_index': current_index } if request.method ==", "django.http import HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict #, HttpResponseForbidden, Http404, ,", "{ 'authority': authority.id, 'name_for_display_in_citation': authority.name } type_controlled = request.GET.get('type_controlled', None)", "u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_aarelation_for_authority(request, authority_id): authority", "u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def delete_aarelation_for_authority(request, authority_id, aarelation_id,", "import tasks as data_tasks from curation import p3_port_utils from curation.forms", "import strip_punctuation, normalize from isisdata import operations from isisdata.filters import", "'current_index': current_index } if request.POST.get('confirm', False) == 'true': if not", "instance=aarelation) elif request.method == 'POST': form = 
AARelationForm(request.POST, prefix='aarelation') if", "'acrelation': acrelation, 'search_key': search_key, 'current_index': current_index } if request.method ==", "authority_id, aarelation_id): authority = get_object_or_404(Authority, pk=authority_id) aarelation = get_object_or_404(AARelation, pk=aarelation_id)", "create_aarelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id) search_key = request.GET.get('search', request.POST.get('search'))", "{ 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'acrelation': acrelation, 'search_key':", "u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def aarelation_for_authority(request, authority_id, aarelation_id):", "'POST': form = ACRelationForm(request.POST, prefix='acrelation') if form.is_valid(): form.save() target =", "authority.id } aarelation=AARelation() aarelation.subject = authority type_controlled = request.GET.get('type_controlled', None)", "True}) target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations' if search_key and", "request.POST.get('confirm', False) == 'true': if not aarelation.modified_on: aarelation.modified_on = datetime.datetime.now()", "request.GET.get('current', request.POST.get('current')) context = { 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance':", "context = { 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'aarelation':", "= request.GET.get('type_controlled', None) if type_controlled: aarelation = dict(AARelation.TYPE_CHOICES)[type_controlled] form =", "'?tab=acrelations' if search_key and current_index: target += '&search=%s&current=%s' % (search_key,", "HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict #, HttpResponseForbidden, Http404, , JsonResponse from", "= get_object_or_404(Authority, pk=authority_id) search_key = 
request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current',", "from isisdata import operations from isisdata.filters import * from isisdata", "aarelation.modified_on: aarelation.modified_on = datetime.datetime.now() aarelation.delete() if format == 'json': return", "= ACRelationForm(prefix='acrelation', initial=initial) elif request.method == 'POST': form = ACRelationForm(request.POST,", "form = ACRelationForm(request.POST, prefix='acrelation') if form.is_valid(): form.save() target = reverse('curation:curate_authority',", "curation.forms import * from curation.contrib.views import check_rules @user_passes_test(lambda u: u.is_superuser", "@user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_acrelation_for_authority(request,", "u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def create_aarelation_for_authority(request, authority_id):", "context = { 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'acrelation':", "u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def delete_aarelation_for_authority(request, authority_id, aarelation_id, format=None): authority", "format=None): authority = get_object_or_404(Authority, pk=authority_id) aarelation = get_object_or_404(AARelation, pk=aarelation_id) search_key", "request.method == 'POST': form = AARelationForm(request.POST, prefix='aarelation') if form.is_valid(): form.save()", "redirect from django.urls import reverse from django.contrib.admin.views.decorators import staff_member_required, user_passes_test", "fn=objectgetter(Authority, 'authority_id')) def aarelation_for_authority(request, authority_id, aarelation_id): authority = get_object_or_404(Authority, pk=authority_id)", "aarelation = get_object_or_404(AARelation, pk=aarelation_id) 
search_key = request.GET.get('search', request.POST.get('search')) current_index =", "format == 'json': return JsonResponse({'result': False}) template = 'curation/authority_aarelation_delete.html' return", "= { 'curation_section': 'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'acrelation': acrelation,", "} aarelation=AARelation() aarelation.subject = authority type_controlled = request.GET.get('type_controlled', None) if", "== 'json': return JsonResponse({'result': False}) template = 'curation/authority_aarelation_delete.html' return render(request,", "current_index } if request.method == 'GET': initial = { 'authority':", "acrelation = get_object_or_404(ACRelation, pk=acrelation_id) search_key = request.GET.get('search', request.POST.get('search')) current_index =", "u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def aarelation_for_authority(request, authority_id,", "initial=initial) elif request.method == 'POST': form = ACRelationForm(request.POST, prefix='acrelation') if", "render, redirect from django.urls import reverse from django.contrib.admin.views.decorators import staff_member_required,", "curation.contrib.views import check_rules @user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority,", "search_key, 'current_index': current_index } if request.method == 'GET': form =", "'subject': authority.id } aarelation=AARelation() aarelation.subject = authority type_controlled = request.GET.get('type_controlled',", "get_object_or_404(Authority, pk=authority_id) acrelation = get_object_or_404(ACRelation, pk=acrelation_id) search_key = request.GET.get('search', request.POST.get('search'))", "permission_required, objectgetter from isisdata.models import * from isisdata.utils import strip_punctuation,", "from isisdata.models import * from isisdata.utils import strip_punctuation, normalize from", "} if 
request.method == 'GET': form = ACRelationForm(instance=acrelation, prefix='acrelation') elif", "prefix='aarelation') elif request.method == 'POST': form = AARelationForm(request.POST, instance=aarelation, prefix='aarelation')", "u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def delete_aarelation_for_authority(request, authority_id,", "initial = { 'authority': authority.id, 'name_for_display_in_citation': authority.name } type_controlled =", "search_key and current_index: target += '&search=%s&current=%s' % (search_key, current_index) return", "aarelation_id): authority = get_object_or_404(Authority, pk=authority_id) aarelation = get_object_or_404(AARelation, pk=aarelation_id) search_key", "* from isisdata import tasks as data_tasks from curation import", "operations from isisdata.filters import * from isisdata import tasks as", "elif request.method == 'POST': form = ACRelationForm(request.POST, prefix='acrelation') if form.is_valid():", "authority = get_object_or_404(Authority, pk=authority_id) acrelation = get_object_or_404(ACRelation, pk=acrelation_id) search_key =", "check_rules @user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def", "return HttpResponseRedirect(target) context.update({ 'form': form, }) template = 'curation/authority_acrelation_changeview.html' return", "'authority_id')) def create_acrelation_for_authority(request, authority_id): authority = get_object_or_404(Authority, pk=authority_id) search_key =", "form = AARelationForm(instance=aarelation, prefix='aarelation') elif request.method == 'POST': form =", "% (search_key, current_index) return HttpResponseRedirect(target) if format == 'json': return", "django.shortcuts import get_object_or_404, render, redirect from django.urls import reverse from", "form.save() target = reverse('curation:curate_authority', args=(authority.id,)) + 
'?tab=acrelations' if search_key and", "@user_passes_test(lambda u: u.is_superuser or u.is_staff) @check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id')) def aarelation_for_authority(request,", "'datasets', 'curation_subsection': 'authorities', 'instance': authority, 'search_key': search_key, 'current_index': current_index }", "tasks as data_tasks from curation import p3_port_utils from curation.forms import", "__future__ import unicode_literals from django.http import HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict", "get_object_or_404(AARelation, pk=aarelation_id) search_key = request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current', request.POST.get('current'))", "= request.GET.get('type_controlled', None) if type_controlled: initial.update({'type_controlled': type_controlled.upper()}) form = ACRelationForm(prefix='acrelation',", "format == 'json': return JsonResponse({'result': True}) target = reverse('curation:curate_authority', args=(authority.id,))", "and current_index: target += '&search=%s&current=%s' % (search_key, current_index) return HttpResponseRedirect(target)", "request.GET.get('type_controlled', None) if type_controlled: aarelation = dict(AARelation.TYPE_CHOICES)[type_controlled] form = AARelationForm(prefix='aarelation',", "aarelation_for_authority(request, authority_id, aarelation_id): authority = get_object_or_404(Authority, pk=authority_id) aarelation = get_object_or_404(AARelation,", "= datetime.datetime.now() aarelation.delete() if format == 'json': return JsonResponse({'result': True})", "'&search=%s&current=%s' % (search_key, current_index) return HttpResponseRedirect(target) if format == 'json':", "ACRelationForm(prefix='acrelation', initial=initial) elif request.method == 'POST': form = ACRelationForm(request.POST, prefix='acrelation')", "= AARelationForm(prefix='aarelation', instance=aarelation) elif request.method == 'POST': form = AARelationForm(request.POST,", 
"= reverse('curation:curate_authority', args=(authority.id,)) + '?tab=acrelations' if search_key and current_index: target", "from django.http import HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict #, HttpResponseForbidden, Http404,", "request.GET.get('type_controlled', None) if type_controlled: initial.update({'type_controlled': type_controlled.upper()}) form = ACRelationForm(prefix='acrelation', initial=initial)", "= get_object_or_404(AARelation, pk=aarelation_id) search_key = request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current',", "= get_object_or_404(ACRelation, pk=acrelation_id) search_key = request.GET.get('search', request.POST.get('search')) current_index = request.GET.get('current',", "django.contrib.admin.views.decorators import staff_member_required, user_passes_test from rules.contrib.views import permission_required, objectgetter from", "'curation_subsection': 'authorities', 'instance': authority, 'search_key': search_key, 'current_index': current_index } if", "if request.method == 'GET': form = ACRelationForm(instance=acrelation, prefix='acrelation') elif request.method", "normalize from isisdata import operations from isisdata.filters import * from" ]
[ "installed version: %s not installed or raising error.\", project_name) raise", "module = importlib.import_module(project_name) except Exception: logger.error(\"Cannot run tests on installed", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "project_name) raise else: # Use built source build_dir = build_project(project_name,", "for the project. Build directory can be modified by environment", "command. :return: The name of the project stored in root_dir", "args = [normalize_option(p) for p in sys.argv[1:] if p !=", "Synchrotron Radiation Facility # # Permission is hereby granted, free", "function in <project_package>.test, which returns a unittest.TestSuite. Test coverage dependencies:", "except Exception as error: logger.warning(\"h5py missing: %s\", error) else: logger.info(\"h5py", "build was performed \"\"\" platform = distutils.util.get_platform() architecture = \"lib.%s-%i.%i\"", "Exception as error: logger.warning(\"Numpy missing: %s\", error) else: logger.info(\"Numpy %s\",", "way home = os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)[-1] elif os.environ.get(\"BUILDPYTHONPATH\"): home = os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\",", "warnings logging.captureWarnings(True) import warnings warnings.simplefilter('default') logger = logging.getLogger(\"run_tests\") logger.setLevel(logging.WARNING) logger.info(\"Python", "%s\", sys.version, tuple.__itemsize__ * 8) try: import numpy except Exception", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "project :return: The path to the directory were build was", "%s\", root_dir) p = subprocess.Popen([sys.executable, \"setup.py\", \"--name\"], shell=False, cwd=root_dir, stdout=subprocess.PIPE)", "def normalize_option(option): option_parts = option.split(os.path.sep) if option_parts == [\"src\", \"silx\"]:", "2015-2021 European Synchrotron Radiation Facility # # Permission is hereby", "rights # to use, copy, modify, merge, publish, distribute, sublicense,", "in debug 
mode.\"\"\" try: import sysconfig except ImportError: # pragma", "true if the Python interpreter is in debug mode.\"\"\" try:", "name: Name of the project. :param str root_dir: Root directory", "return module if __name__ == \"__main__\": # Needed for multiprocessing", "permission notice shall be included in # all copies or", "error) else: logger.info(\"h5py %s\", h5py.version.version) def get_project_name(root_dir): \"\"\"Retrieve project name", "import distutils.sysconfig as sysconfig if sysconfig.get_config_var(\"Py_DEBUG\"): return True return hasattr(sys,", "logger.error(\"Cannot run tests on installed version: %s not installed or", "try: import numpy except Exception as error: logger.warning(\"Numpy missing: %s\",", "= importlib.import_module(project_name) except Exception: logger.error(\"Cannot run tests on installed version:", "Test coverage dependencies: coverage, lxml. \"\"\" __authors__ = [\"<NAME>\", \"<NAME>\"]", "sys.argv[1:] if p != \"--installed\"] # Run test on PROJECT_PATH", "%s not installed or raising error.\", project_name) raise else: #", "portions of the Software. # # THE SOFTWARE IS PROVIDED", "/*########################################################################## # # Copyright (c) 2015-2021 European Synchrotron Radiation Facility", "option args = [normalize_option(p) for p in sys.argv[1:] if p", "project_module = import_project_module(PROJECT_NAME, PROJECT_DIR) PROJECT_VERSION = getattr(project_module, 'version', '') PROJECT_PATH", "project directory\"\"\" if \"--installed\" in sys.argv: try: module = importlib.import_module(project_name)", "added: '%s'\", build_dir) module = importlib.import_module(project_name) return module if __name__", "the project. :param str root_dir: Root directory of the project", "# # The above copyright notice and this permission notice", "setup.py --name in root_dir. 
:param str root_dir: Directory where to", "build_dir) logger.warning(\"Patched sys.path, added: '%s'\", build_dir) module = importlib.import_module(project_name) return", "sys.argv: try: module = importlib.import_module(project_name) except Exception: logger.error(\"Cannot run tests", "the project stored in root_dir \"\"\" logger.debug(\"Getting project name in", "SOFTWARE. # # ###########################################################################*/ \"\"\"Run the tests of the project.", "and associated documentation files (the \"Software\"), to deal # in", "Software without restriction, including without limitation the rights # to", "and to permit persons to whom the Software is #", "else: # Use built source build_dir = build_project(project_name, project_dir) if", "copies of the Software, and to permit persons to whom", "8) try: import numpy except Exception as error: logger.warning(\"Numpy missing:", "hereby granted, free of charge, to any person obtaining a", "numpy.version.version) try: import h5py except Exception as error: logger.warning(\"h5py missing:", "PROJECT_PATH if option_parts[:2] == [\"src\", \"silx\"]: return os.path.join(PROJECT_PATH, *option_parts[2:]) return", "= distutils.util.get_platform() architecture = \"lib.%s-%i.%i\" % (platform, sys.version_info[0], sys.version_info[1]) if", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "distribute, sublicense, and/or sell # copies of the Software, and", "distutils.util import logging import os import subprocess import sys import", "The name of the project stored in root_dir \"\"\" logger.debug(\"Getting", "\"setup.py\", \"--name\"], shell=False, cwd=root_dir, stdout=subprocess.PIPE) name, _stderr_data = p.communicate() logger.debug(\"subprocess", "logger.warning(\"Patched sys.path, added: '%s'\", build_dir) module = importlib.import_module(project_name) return module", "# all copies or substantial portions of the Software. 
#", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "the Python interpreter is in debug mode.\"\"\" try: import sysconfig", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "h5py.version.version) def get_project_name(root_dir): \"\"\"Retrieve project name by running python setup.py", "debug mode.\"\"\" try: import sysconfig except ImportError: # pragma nocover", "in sys.argv[1:] if p != \"--installed\"] # Run test on", "variables. :param str name: Name of the project. :param str", "\"-pydebug\" if os.environ.get(\"PYBUILD_NAME\") == name: # we are in the", "included in # all copies or substantial portions of the", "logging.getLogger(\"run_tests\") logger.setLevel(logging.WARNING) logger.info(\"Python %s %s\", sys.version, tuple.__itemsize__ * 8) try:", "= os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\", \"\")) else: home = os.path.join(root_dir, \"build\", architecture) logger.warning(\"Building", "option_parts[:2] == [\"src\", \"silx\"]: return os.path.join(PROJECT_PATH, *option_parts[2:]) return option args", "\"lib.%s-%i.%i\" % (platform, sys.version_info[0], sys.version_info[1]) if is_debug_python(): architecture += \"-pydebug\"", "a in args if not a.startswith(\"-\")] if len(without_options) == 0:", "try: import h5py except Exception as error: logger.warning(\"h5py missing: %s\",", "distutils.util.get_platform() architecture = \"lib.%s-%i.%i\" % (platform, sys.version_info[0], sys.version_info[1]) if is_debug_python():", "import logging import os import subprocess import sys import importlib", "\"silx\"]: return os.path.join(PROJECT_PATH, *option_parts[2:]) return option args = [normalize_option(p) for", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "project. 
:param str root_dir: Root directory of the project :return:", "deal # in the Software without restriction, including without limitation", "warnings.simplefilter('default') logger = logging.getLogger(\"run_tests\") logger.setLevel(logging.WARNING) logger.info(\"Python %s %s\", sys.version, tuple.__itemsize__", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "os.path.join(os.path.dirname(home), \"lib\") if os.path.isdir(alt_home): return alt_home def import_project_module(project_name, project_dir): \"\"\"Import", "build_dir) module = importlib.import_module(project_name) return module if __name__ == \"__main__\":", "notice and this permission notice shall be included in #", "None: logging.error(\"Built project is not available !!! investigate\") sys.path.insert(0, build_dir)", "the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\",", "copy, modify, merge, publish, distribute, sublicense, and/or sell # copies", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "\"\"\"Run the tests of the project. This script expects a", "= os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)[-1] elif os.environ.get(\"BUILDPYTHONPATH\"): home = os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\", \"\")) else:", "logger.warning(\"Numpy missing: %s\", error) else: logger.info(\"Numpy %s\", numpy.version.version) try: import", "return home alt_home = os.path.join(os.path.dirname(home), \"lib\") if os.path.isdir(alt_home): return alt_home", "pragma nocover # Python < 2.7 import distutils.sysconfig as sysconfig", "root_dir \"\"\" logger.debug(\"Getting project name in %s\", root_dir) p =", "== 0: args += [PROJECT_PATH] argv = [\"--rootdir\", PROJECT_PATH] +", "all copies or substantial portions of the Software. # #", "interpreter is in debug mode.\"\"\" try: import sysconfig except ImportError:", "available !!! 
investigate\") sys.path.insert(0, build_dir) logger.warning(\"Patched sys.path, added: '%s'\", build_dir)", "%s\", numpy.version.version) try: import h5py except Exception as error: logger.warning(\"h5py", "software and associated documentation files (the \"Software\"), to deal #", "PROJECT_PATH if nothing is specified without_options = [a for a", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "python3 # coding: utf8 # /*########################################################################## # # Copyright (c)", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR", "the Software without restriction, including without limitation the rights #", "args += [PROJECT_PATH] argv = [\"--rootdir\", PROJECT_PATH] + args sys.exit(pytest.main(argv))", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "= os.path.join(os.path.dirname(home), \"lib\") if os.path.isdir(alt_home): return alt_home def import_project_module(project_name, project_dir):", "# Use built source build_dir = build_project(project_name, project_dir) if build_dir", "if p != \"--installed\"] # Run test on PROJECT_PATH if", "= \"30/09/2020\" __license__ = \"MIT\" import distutils.util import logging import", "\"\"\"Run python setup.py build for the project. 
Build directory can", "hasattr(sys, \"gettotalrefcount\") def build_project(name, root_dir): \"\"\"Run python setup.py build for", "else: logger.info(\"Numpy %s\", numpy.version.version) try: import h5py except Exception as", "p != \"--installed\"] # Run test on PROJECT_PATH if nothing", "try: module = importlib.import_module(project_name) except Exception: logger.error(\"Cannot run tests on", "sys.version_info[0], sys.version_info[1]) if is_debug_python(): architecture += \"-pydebug\" if os.environ.get(\"PYBUILD_NAME\") ==", "\"build\"], shell=False, cwd=root_dir) logger.debug(\"subprocess ended with rc= %s\", p.wait()) if", "name: %s\", PROJECT_NAME) project_module = import_project_module(PROJECT_NAME, PROJECT_DIR) PROJECT_VERSION = getattr(project_module,", "Build directory can be modified by environment variables. :param str", "directory of the project :return: The path to the directory", "home = os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\", \"\")) else: home = os.path.join(root_dir, \"build\", architecture)", "# of this software and associated documentation files (the \"Software\"),", "furnished to do so, subject to the following conditions: #", "to do so, subject to the following conditions: # #", "sysconfig if sysconfig.get_config_var(\"Py_DEBUG\"): return True return hasattr(sys, \"gettotalrefcount\") def build_project(name,", "# The above copyright notice and this permission notice shall", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "return name.split()[-1].decode('ascii') def is_debug_python(): \"\"\"Returns true if the Python interpreter", "run the command. 
:return: The name of the project stored", "on Windows import pytest PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_NAME = get_project_name(PROJECT_DIR)", "a copy # of this software and associated documentation files", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "2.7 import distutils.sysconfig as sysconfig if sysconfig.get_config_var(\"Py_DEBUG\"): return True return", "\"\"\" logger.debug(\"Getting project name in %s\", root_dir) p = subprocess.Popen([sys.executable,", "__license__ = \"MIT\" import distutils.util import logging import os import", "logger.warning(\"Building %s to %s\", name, home) p = subprocess.Popen([sys.executable, \"setup.py\",", "notice shall be included in # all copies or substantial", "utf8 # /*########################################################################## # # Copyright (c) 2015-2021 European Synchrotron", "pytest PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_NAME = get_project_name(PROJECT_DIR) logger.info(\"Project name: %s\",", "in root_dir. :param str root_dir: Directory where to run the", "os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)[-1] elif os.environ.get(\"BUILDPYTHONPATH\"): home = os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\", \"\")) else: home", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "and this permission notice shall be included in # all", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "# # ###########################################################################*/ \"\"\"Run the tests of the project. This", "\"MIT\" import distutils.util import logging import os import subprocess import", "dependencies: coverage, lxml. \"\"\" __authors__ = [\"<NAME>\", \"<NAME>\"] __date__ =", "p = subprocess.Popen([sys.executable, \"setup.py\", \"--name\"], shell=False, cwd=root_dir, stdout=subprocess.PIPE) name, _stderr_data", "unittest.TestSuite. 
Test coverage dependencies: coverage, lxml. \"\"\" __authors__ = [\"<NAME>\",", "following conditions: # # The above copyright notice and this", "if the Python interpreter is in debug mode.\"\"\" try: import", "to deal # in the Software without restriction, including without", "without_options = [a for a in args if not a.startswith(\"-\")]", "= import_project_module(PROJECT_NAME, PROJECT_DIR) PROJECT_VERSION = getattr(project_module, 'version', '') PROJECT_PATH =", "conditions: # # The above copyright notice and this permission", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "project_module.__path__[0] def normalize_option(option): option_parts = option.split(os.path.sep) if option_parts == [\"src\",", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", ":return: The name of the project stored in root_dir \"\"\"", "is in debug mode.\"\"\" try: import sysconfig except ImportError: #", "tests on installed version: %s not installed or raising error.\",", "not a.startswith(\"-\")] if len(without_options) == 0: args += [PROJECT_PATH] argv", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "import h5py except Exception as error: logger.warning(\"h5py missing: %s\", error)", "\"\")) else: home = os.path.join(root_dir, \"build\", architecture) logger.warning(\"Building %s to", "h5py except Exception as error: logger.warning(\"h5py missing: %s\", error) else:", "of the project. This script expects a suite function in", "numpy except Exception as error: logger.warning(\"Numpy missing: %s\", error) else:", "mode.\"\"\" try: import sysconfig except ImportError: # pragma nocover #", "subprocess.Popen([sys.executable, \"setup.py\", \"build\"], shell=False, cwd=root_dir) logger.debug(\"subprocess ended with rc= %s\",", "#!/usr/bin/env python3 # coding: utf8 # /*########################################################################## # # Copyright", "lxml. 
\"\"\" __authors__ = [\"<NAME>\", \"<NAME>\"] __date__ = \"30/09/2020\" __license__", "if option_parts == [\"src\", \"silx\"]: return PROJECT_PATH if option_parts[:2] ==", "Windows import pytest PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_NAME = get_project_name(PROJECT_DIR) logger.info(\"Project", "project. Build directory can be modified by environment variables. :param", "and/or sell # copies of the Software, and to permit", "were build was performed \"\"\" platform = distutils.util.get_platform() architecture =", "the rights # to use, copy, modify, merge, publish, distribute,", "in <project_package>.test, which returns a unittest.TestSuite. Test coverage dependencies: coverage,", "THE SOFTWARE. # # ###########################################################################*/ \"\"\"Run the tests of the", "p.wait()) if os.path.isdir(home): return home alt_home = os.path.join(os.path.dirname(home), \"lib\") if", "the command. :return: The name of the project stored in", "sysconfig.get_config_var(\"Py_DEBUG\"): return True return hasattr(sys, \"gettotalrefcount\") def build_project(name, root_dir): \"\"\"Run", "\"setup.py\", \"build\"], shell=False, cwd=root_dir) logger.debug(\"subprocess ended with rc= %s\", p.wait())", "be included in # all copies or substantial portions of", "of from the project directory\"\"\" if \"--installed\" in sys.argv: try:", "is hereby granted, free of charge, to any person obtaining", "= \"lib.%s-%i.%i\" % (platform, sys.version_info[0], sys.version_info[1]) if is_debug_python(): architecture +=", "home = os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)[-1] elif os.environ.get(\"BUILDPYTHONPATH\"): home = os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\", \"\"))", "is_debug_python(): architecture += \"-pydebug\" if os.environ.get(\"PYBUILD_NAME\") == name: # we", "by running python setup.py --name in root_dir. 
:param str root_dir:", "for multiprocessing support on Windows import pytest PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))", "\"\"\"Returns true if the Python interpreter is in debug mode.\"\"\"", "import os import subprocess import sys import importlib # Capture", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "%s\", PROJECT_NAME) project_module = import_project_module(PROJECT_NAME, PROJECT_DIR) PROJECT_VERSION = getattr(project_module, 'version',", "= option.split(os.path.sep) if option_parts == [\"src\", \"silx\"]: return PROJECT_PATH if", "of the project. :param str root_dir: Root directory of the", "return True return hasattr(sys, \"gettotalrefcount\") def build_project(name, root_dir): \"\"\"Run python", "str name: Name of the project. :param str root_dir: Root", "person obtaining a copy # of this software and associated", "# # Permission is hereby granted, free of charge, to", "without restriction, including without limitation the rights # to use,", "logger.info(\"Python %s %s\", sys.version, tuple.__itemsize__ * 8) try: import numpy", "subject to the following conditions: # # The above copyright", "%s to %s\", name, home) p = subprocess.Popen([sys.executable, \"setup.py\", \"build\"],", "__authors__ = [\"<NAME>\", \"<NAME>\"] __date__ = \"30/09/2020\" __license__ = \"MIT\"", "directory can be modified by environment variables. 
:param str name:", "if __name__ == \"__main__\": # Needed for multiprocessing support on", "\"\").split(os.pathsep)[-1] elif os.environ.get(\"BUILDPYTHONPATH\"): home = os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\", \"\")) else: home =", "import subprocess import sys import importlib # Capture all default", "missing: %s\", error) else: logger.info(\"h5py %s\", h5py.version.version) def get_project_name(root_dir): \"\"\"Retrieve", "os.path.isdir(alt_home): return alt_home def import_project_module(project_name, project_dir): \"\"\"Import project module, from", "args if not a.startswith(\"-\")] if len(without_options) == 0: args +=", "# coding: utf8 # /*########################################################################## # # Copyright (c) 2015-2021", "p.communicate() logger.debug(\"subprocess ended with rc= %s\", p.returncode) return name.split()[-1].decode('ascii') def", "import pytest PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_NAME = get_project_name(PROJECT_DIR) logger.info(\"Project name:", "sys.path, added: '%s'\", build_dir) module = importlib.import_module(project_name) return module if", "= \"MIT\" import distutils.util import logging import os import subprocess", "else: home = os.path.join(root_dir, \"build\", architecture) logger.warning(\"Building %s to %s\",", "script expects a suite function in <project_package>.test, which returns a", "import distutils.util import logging import os import subprocess import sys", "%s\", error) else: logger.info(\"Numpy %s\", numpy.version.version) try: import h5py except", "os.path.join(PROJECT_PATH, *option_parts[2:]) return option args = [normalize_option(p) for p in", "default warnings logging.captureWarnings(True) import warnings warnings.simplefilter('default') logger = logging.getLogger(\"run_tests\") logger.setLevel(logging.WARNING)", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "\"\"\"Import project module, from the system of from the project", "of the project stored in 
root_dir \"\"\" logger.debug(\"Getting project name", "# Python < 2.7 import distutils.sysconfig as sysconfig if sysconfig.get_config_var(\"Py_DEBUG\"):", "is not available !!! investigate\") sys.path.insert(0, build_dir) logger.warning(\"Patched sys.path, added:", "specified without_options = [a for a in args if not", "logger.debug(\"Getting project name in %s\", root_dir) p = subprocess.Popen([sys.executable, \"setup.py\",", "tests of the project. This script expects a suite function", "The path to the directory were build was performed \"\"\"", "sys.version, tuple.__itemsize__ * 8) try: import numpy except Exception as", "to %s\", name, home) p = subprocess.Popen([sys.executable, \"setup.py\", \"build\"], shell=False,", "home alt_home = os.path.join(os.path.dirname(home), \"lib\") if os.path.isdir(alt_home): return alt_home def", "installed or raising error.\", project_name) raise else: # Use built", "is specified without_options = [a for a in args if", "platform = distutils.util.get_platform() architecture = \"lib.%s-%i.%i\" % (platform, sys.version_info[0], sys.version_info[1])", "__date__ = \"30/09/2020\" __license__ = \"MIT\" import distutils.util import logging", "or substantial portions of the Software. # # THE SOFTWARE", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "# # Copyright (c) 2015-2021 European Synchrotron Radiation Facility #", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "*option_parts[2:]) return option args = [normalize_option(p) for p in sys.argv[1:]", "# Copyright (c) 2015-2021 European Synchrotron Radiation Facility # #", "PROJECT_DIR) PROJECT_VERSION = getattr(project_module, 'version', '') PROJECT_PATH = project_module.__path__[0] def", "logging.error(\"Built project is not available !!! 
investigate\") sys.path.insert(0, build_dir) logger.warning(\"Patched", "name of the project stored in root_dir \"\"\" logger.debug(\"Getting project", "rc= %s\", p.returncode) return name.split()[-1].decode('ascii') def is_debug_python(): \"\"\"Returns true if", "the tests of the project. This script expects a suite", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "build_project(name, root_dir): \"\"\"Run python setup.py build for the project. Build", "Python < 2.7 import distutils.sysconfig as sysconfig if sysconfig.get_config_var(\"Py_DEBUG\"): return", "\"gettotalrefcount\") def build_project(name, root_dir): \"\"\"Run python setup.py build for the", "Capture all default warnings logging.captureWarnings(True) import warnings warnings.simplefilter('default') logger =", "* 8) try: import numpy except Exception as error: logger.warning(\"Numpy", "missing: %s\", error) else: logger.info(\"Numpy %s\", numpy.version.version) try: import h5py", "architecture += \"-pydebug\" if os.environ.get(\"PYBUILD_NAME\") == name: # we are", "raising error.\", project_name) raise else: # Use built source build_dir", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "can be modified by environment variables. :param str name: Name", "elif os.environ.get(\"BUILDPYTHONPATH\"): home = os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\", \"\")) else: home = os.path.join(root_dir,", "# Permission is hereby granted, free of charge, to any", "of charge, to any person obtaining a copy # of", "to run the command. 
:return: The name of the project", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "merge, publish, distribute, sublicense, and/or sell # copies of the", "'') PROJECT_PATH = project_module.__path__[0] def normalize_option(option): option_parts = option.split(os.path.sep) if", "<project_package>.test, which returns a unittest.TestSuite. Test coverage dependencies: coverage, lxml.", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "os.path.dirname(os.path.abspath(__file__)) PROJECT_NAME = get_project_name(PROJECT_DIR) logger.info(\"Project name: %s\", PROJECT_NAME) project_module =", "import importlib # Capture all default warnings logging.captureWarnings(True) import warnings", "ended with rc= %s\", p.returncode) return name.split()[-1].decode('ascii') def is_debug_python(): \"\"\"Returns", "ImportError: # pragma nocover # Python < 2.7 import distutils.sysconfig", "NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "def get_project_name(root_dir): \"\"\"Retrieve project name by running python setup.py --name", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "os.environ.get(\"BUILDPYTHONPATH\"): home = os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\", \"\")) else: home = os.path.join(root_dir, \"build\",", "project is not available !!! 
investigate\") sys.path.insert(0, build_dir) logger.warning(\"Patched sys.path,", "sys.path.insert(0, build_dir) logger.warning(\"Patched sys.path, added: '%s'\", build_dir) module = importlib.import_module(project_name)", "stored in root_dir \"\"\" logger.debug(\"Getting project name in %s\", root_dir)", "[a for a in args if not a.startswith(\"-\")] if len(without_options)", "expects a suite function in <project_package>.test, which returns a unittest.TestSuite.", "logging import os import subprocess import sys import importlib #", "str root_dir: Root directory of the project :return: The path", "path to the directory were build was performed \"\"\" platform", "running python setup.py --name in root_dir. :param str root_dir: Directory", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "the directory were build was performed \"\"\" platform = distutils.util.get_platform()", "architecture = \"lib.%s-%i.%i\" % (platform, sys.version_info[0], sys.version_info[1]) if is_debug_python(): architecture", "__name__ == \"__main__\": # Needed for multiprocessing support on Windows", "PROJECT_PATH = project_module.__path__[0] def normalize_option(option): option_parts = option.split(os.path.sep) if option_parts", "if not a.startswith(\"-\")] if len(without_options) == 0: args += [PROJECT_PATH]", "logger.warning(\"h5py missing: %s\", error) else: logger.info(\"h5py %s\", h5py.version.version) def get_project_name(root_dir):", "so, subject to the following conditions: # # The above", "of the project :return: The path to the directory were", "subprocess import sys import importlib # Capture all default warnings", "\"\"\" __authors__ = [\"<NAME>\", \"<NAME>\"] __date__ = \"30/09/2020\" __license__ =", "if os.path.isdir(alt_home): return alt_home def import_project_module(project_name, project_dir): \"\"\"Import project module,", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "== [\"src\", 
\"silx\"]: return os.path.join(PROJECT_PATH, *option_parts[2:]) return option args =", "suite function in <project_package>.test, which returns a unittest.TestSuite. Test coverage", "project stored in root_dir \"\"\" logger.debug(\"Getting project name in %s\",", "source build_dir = build_project(project_name, project_dir) if build_dir is None: logging.error(\"Built", "DEALINGS IN # THE SOFTWARE. # # ###########################################################################*/ \"\"\"Run the", "os.path.join(root_dir, \"build\", architecture) logger.warning(\"Building %s to %s\", name, home) p", "the following conditions: # # The above copyright notice and", "project_dir): \"\"\"Import project module, from the system of from the", "if nothing is specified without_options = [a for a in", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "if build_dir is None: logging.error(\"Built project is not available !!!", "project. This script expects a suite function in <project_package>.test, which", "OR OTHER DEALINGS IN # THE SOFTWARE. 
# # ###########################################################################*/", "os.path.isdir(home): return home alt_home = os.path.join(os.path.dirname(home), \"lib\") if os.path.isdir(alt_home): return", "= importlib.import_module(project_name) return module if __name__ == \"__main__\": # Needed", "os.environ.get(\"PYBUILD_NAME\") == name: # we are in the debian packaging", "name.split()[-1].decode('ascii') def is_debug_python(): \"\"\"Returns true if the Python interpreter is", "# /*########################################################################## # # Copyright (c) 2015-2021 European Synchrotron Radiation", "nocover # Python < 2.7 import distutils.sysconfig as sysconfig if", "on installed version: %s not installed or raising error.\", project_name)", "the Software, and to permit persons to whom the Software", "importlib.import_module(project_name) return module if __name__ == \"__main__\": # Needed for", "[\"src\", \"silx\"]: return PROJECT_PATH if option_parts[:2] == [\"src\", \"silx\"]: return", "root_dir: Root directory of the project :return: The path to", "environment variables. :param str name: Name of the project. :param", "+= \"-pydebug\" if os.environ.get(\"PYBUILD_NAME\") == name: # we are in", "error) else: logger.info(\"Numpy %s\", numpy.version.version) try: import h5py except Exception", "%s\", p.wait()) if os.path.isdir(home): return home alt_home = os.path.join(os.path.dirname(home), \"lib\")", "Python interpreter is in debug mode.\"\"\" try: import sysconfig except", "in # all copies or substantial portions of the Software.", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT", "# pragma nocover # Python < 2.7 import distutils.sysconfig as", "persons to whom the Software is # furnished to do", "import warnings warnings.simplefilter('default') logger = logging.getLogger(\"run_tests\") logger.setLevel(logging.WARNING) logger.info(\"Python %s %s\",", "return alt_home def import_project_module(project_name, project_dir): \"\"\"Import project module, from the", "OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE.", "associated documentation files (the \"Software\"), to deal # in the", "import sys import importlib # Capture all default warnings logging.captureWarnings(True)", "if option_parts[:2] == [\"src\", \"silx\"]: return os.path.join(PROJECT_PATH, *option_parts[2:]) return option", "PROJECT_NAME) project_module = import_project_module(PROJECT_NAME, PROJECT_DIR) PROJECT_VERSION = getattr(project_module, 'version', '')", "!!! investigate\") sys.path.insert(0, build_dir) logger.warning(\"Patched sys.path, added: '%s'\", build_dir) module", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "0: args += [PROJECT_PATH] argv = [\"--rootdir\", PROJECT_PATH] + args", "to any person obtaining a copy # of this software", "project_dir) if build_dir is None: logging.error(\"Built project is not available", "return PROJECT_PATH if option_parts[:2] == [\"src\", \"silx\"]: return os.path.join(PROJECT_PATH, *option_parts[2:])", "import_project_module(PROJECT_NAME, PROJECT_DIR) PROJECT_VERSION = getattr(project_module, 'version', '') PROJECT_PATH = project_module.__path__[0]", "%s\", error) else: logger.info(\"h5py %s\", h5py.version.version) def get_project_name(root_dir): \"\"\"Retrieve project", "[normalize_option(p) for p in sys.argv[1:] if p != \"--installed\"] #", "this software and associated documentation files (the \"Software\"), to deal", "of the Software, and to permit persons to whom the", "Facility # # Permission is hereby granted, free of charge,", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "returns a unittest.TestSuite. Test coverage dependencies: coverage, lxml. \"\"\" __authors__", "option_parts = option.split(os.path.sep) if option_parts == [\"src\", \"silx\"]: return PROJECT_PATH", "[\"<NAME>\", \"<NAME>\"] __date__ = \"30/09/2020\" __license__ = \"MIT\" import distutils.util", "str root_dir: Directory where to run the command. :return: The", "project name by running python setup.py --name in root_dir. 
:param", "p.returncode) return name.split()[-1].decode('ascii') def is_debug_python(): \"\"\"Returns true if the Python", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "performed \"\"\" platform = distutils.util.get_platform() architecture = \"lib.%s-%i.%i\" % (platform,", "\"lib\") if os.path.isdir(alt_home): return alt_home def import_project_module(project_name, project_dir): \"\"\"Import project", "PROJECT_NAME = get_project_name(PROJECT_DIR) logger.info(\"Project name: %s\", PROJECT_NAME) project_module = import_project_module(PROJECT_NAME,", "was performed \"\"\" platform = distutils.util.get_platform() architecture = \"lib.%s-%i.%i\" %", "\"--installed\" in sys.argv: try: module = importlib.import_module(project_name) except Exception: logger.error(\"Cannot", "where to run the command. :return: The name of the", "warnings warnings.simplefilter('default') logger = logging.getLogger(\"run_tests\") logger.setLevel(logging.WARNING) logger.info(\"Python %s %s\", sys.version,", "Software is # furnished to do so, subject to the", "= [\"<NAME>\", \"<NAME>\"] __date__ = \"30/09/2020\" __license__ = \"MIT\" import", "a.startswith(\"-\")] if len(without_options) == 0: args += [PROJECT_PATH] argv =", "\"--name\"], shell=False, cwd=root_dir, stdout=subprocess.PIPE) name, _stderr_data = p.communicate() logger.debug(\"subprocess ended", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "whom the Software is # furnished to do so, subject", "sublicense, and/or sell # copies of the Software, and to", "\"silx\"]: return PROJECT_PATH if option_parts[:2] == [\"src\", \"silx\"]: return os.path.join(PROJECT_PATH,", "support on Windows import pytest PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_NAME =", "p = subprocess.Popen([sys.executable, \"setup.py\", \"build\"], shell=False, cwd=root_dir) logger.debug(\"subprocess ended with", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "substantial portions of the Software. 
# # THE SOFTWARE IS", "nothing is specified without_options = [a for a in args", "system of from the project directory\"\"\" if \"--installed\" in sys.argv:", ":param str root_dir: Root directory of the project :return: The", "do so, subject to the following conditions: # # The", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "True return hasattr(sys, \"gettotalrefcount\") def build_project(name, root_dir): \"\"\"Run python setup.py", "# we are in the debian packaging way home =", "in the Software without restriction, including without limitation the rights", "modified by environment variables. :param str name: Name of the", "(c) 2015-2021 European Synchrotron Radiation Facility # # Permission is", "architecture) logger.warning(\"Building %s to %s\", name, home) p = subprocess.Popen([sys.executable,", "== [\"src\", \"silx\"]: return PROJECT_PATH if option_parts[:2] == [\"src\", \"silx\"]:", "# furnished to do so, subject to the following conditions:", "% (platform, sys.version_info[0], sys.version_info[1]) if is_debug_python(): architecture += \"-pydebug\" if", "any person obtaining a copy # of this software and", "not available !!! 
investigate\") sys.path.insert(0, build_dir) logger.warning(\"Patched sys.path, added: '%s'\",", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "# Needed for multiprocessing support on Windows import pytest PROJECT_DIR", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "sys.version_info[1]) if is_debug_python(): architecture += \"-pydebug\" if os.environ.get(\"PYBUILD_NAME\") == name:", "cwd=root_dir) logger.debug(\"subprocess ended with rc= %s\", p.wait()) if os.path.isdir(home): return", "shall be included in # all copies or substantial portions", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "return option args = [normalize_option(p) for p in sys.argv[1:] if", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "< 2.7 import distutils.sysconfig as sysconfig if sysconfig.get_config_var(\"Py_DEBUG\"): return True", "be modified by environment variables. :param str name: Name of", "restriction, including without limitation the rights # to use, copy,", "except Exception as error: logger.warning(\"Numpy missing: %s\", error) else: logger.info(\"Numpy", ":param str name: Name of the project. :param str root_dir:", "Run test on PROJECT_PATH if nothing is specified without_options =", "THE USE OR OTHER DEALINGS IN # THE SOFTWARE. #", "directory\"\"\" if \"--installed\" in sys.argv: try: module = importlib.import_module(project_name) except", "def build_project(name, root_dir): \"\"\"Run python setup.py build for the project.", "including without limitation the rights # to use, copy, modify,", "copyright notice and this permission notice shall be included in", "with rc= %s\", p.returncode) return name.split()[-1].decode('ascii') def is_debug_python(): \"\"\"Returns true", "# ###########################################################################*/ \"\"\"Run the tests of the project. 
This script", "in the debian packaging way home = os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)[-1] elif", ":return: The path to the directory were build was performed", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "free of charge, to any person obtaining a copy #", "multiprocessing support on Windows import pytest PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_NAME", "files (the \"Software\"), to deal # in the Software without", ":param str root_dir: Directory where to run the command. :return:", "\"<NAME>\"] __date__ = \"30/09/2020\" __license__ = \"MIT\" import distutils.util import", "is_debug_python(): \"\"\"Returns true if the Python interpreter is in debug", "root_dir) p = subprocess.Popen([sys.executable, \"setup.py\", \"--name\"], shell=False, cwd=root_dir, stdout=subprocess.PIPE) name,", "build for the project. Build directory can be modified by", "os import subprocess import sys import importlib # Capture all", "module = importlib.import_module(project_name) return module if __name__ == \"__main__\": #", "run tests on installed version: %s not installed or raising", "Directory where to run the command. :return: The name of", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "project module, from the system of from the project directory\"\"\"", "of the Software. # # THE SOFTWARE IS PROVIDED \"AS", "to the directory were build was performed \"\"\" platform =", "# Run test on PROJECT_PATH if nothing is specified without_options", "distutils.sysconfig as sysconfig if sysconfig.get_config_var(\"Py_DEBUG\"): return True return hasattr(sys, \"gettotalrefcount\")", "logger.setLevel(logging.WARNING) logger.info(\"Python %s %s\", sys.version, tuple.__itemsize__ * 8) try: import", "the project :return: The path to the directory were build", "the project. 
This script expects a suite function in <project_package>.test,", "importlib.import_module(project_name) except Exception: logger.error(\"Cannot run tests on installed version: %s", "return os.path.join(PROJECT_PATH, *option_parts[2:]) return option args = [normalize_option(p) for p", "%s %s\", sys.version, tuple.__itemsize__ * 8) try: import numpy except", "coding: utf8 # /*########################################################################## # # Copyright (c) 2015-2021 European", "import_project_module(project_name, project_dir): \"\"\"Import project module, from the system of from", "= os.path.dirname(os.path.abspath(__file__)) PROJECT_NAME = get_project_name(PROJECT_DIR) logger.info(\"Project name: %s\", PROJECT_NAME) project_module", "except Exception: logger.error(\"Cannot run tests on installed version: %s not", "a unittest.TestSuite. Test coverage dependencies: coverage, lxml. \"\"\" __authors__ =", "\"build\", architecture) logger.warning(\"Building %s to %s\", name, home) p =", "error: logger.warning(\"h5py missing: %s\", error) else: logger.info(\"h5py %s\", h5py.version.version) def", "= build_project(project_name, project_dir) if build_dir is None: logging.error(\"Built project is", "# THE SOFTWARE. 
# # ###########################################################################*/ \"\"\"Run the tests of", "of this software and associated documentation files (the \"Software\"), to", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "normalize_option(option): option_parts = option.split(os.path.sep) if option_parts == [\"src\", \"silx\"]: return", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "logging.captureWarnings(True) import warnings warnings.simplefilter('default') logger = logging.getLogger(\"run_tests\") logger.setLevel(logging.WARNING) logger.info(\"Python %s", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "get_project_name(root_dir): \"\"\"Retrieve project name by running python setup.py --name in", "Name of the project. :param str root_dir: Root directory of", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS", "OTHER DEALINGS IN # THE SOFTWARE. # # ###########################################################################*/ \"\"\"Run", "raise else: # Use built source build_dir = build_project(project_name, project_dir)", "in sys.argv: try: module = importlib.import_module(project_name) except Exception: logger.error(\"Cannot run", "as error: logger.warning(\"h5py missing: %s\", error) else: logger.info(\"h5py %s\", h5py.version.version)", "PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) PROJECT_NAME = get_project_name(PROJECT_DIR) logger.info(\"Project name: %s\", PROJECT_NAME)", "(the \"Software\"), to deal # in the Software without restriction,", "European Synchrotron Radiation Facility # # Permission is hereby granted,", "we are in the debian packaging way home = os.environ.get(\"PYTHONPATH\",", "shell=False, cwd=root_dir, stdout=subprocess.PIPE) name, _stderr_data = p.communicate() logger.debug(\"subprocess ended with", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "module, from the system of from the project 
directory\"\"\" if", "charge, to any person obtaining a copy # of this", "permit persons to whom the Software is # furnished to", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "the Software is # furnished to do so, subject to", "from the system of from the project directory\"\"\" if \"--installed\"", "%s\", p.returncode) return name.split()[-1].decode('ascii') def is_debug_python(): \"\"\"Returns true if the", "above copyright notice and this permission notice shall be included", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "len(without_options) == 0: args += [PROJECT_PATH] argv = [\"--rootdir\", PROJECT_PATH]", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "limitation the rights # to use, copy, modify, merge, publish,", "this permission notice shall be included in # all copies", "importlib # Capture all default warnings logging.captureWarnings(True) import warnings warnings.simplefilter('default')", "'%s'\", build_dir) module = importlib.import_module(project_name) return module if __name__ ==", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #", "\"\"\"Retrieve project name by running python setup.py --name in root_dir.", "setup.py build for the project. Build directory can be modified", "without limitation the rights # to use, copy, modify, merge,", "a suite function in <project_package>.test, which returns a unittest.TestSuite. 
Test", "sys import importlib # Capture all default warnings logging.captureWarnings(True) import", "packaging way home = os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)[-1] elif os.environ.get(\"BUILDPYTHONPATH\"): home =", "logger.info(\"h5py %s\", h5py.version.version) def get_project_name(root_dir): \"\"\"Retrieve project name by running", "ended with rc= %s\", p.wait()) if os.path.isdir(home): return home alt_home", "test on PROJECT_PATH if nothing is specified without_options = [a", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "root_dir. :param str root_dir: Directory where to run the command.", "with rc= %s\", p.wait()) if os.path.isdir(home): return home alt_home =", "# in the Software without restriction, including without limitation the", "documentation files (the \"Software\"), to deal # in the Software", "for p in sys.argv[1:] if p != \"--installed\"] # Run", "try: import sysconfig except ImportError: # pragma nocover # Python", "copies or substantial portions of the Software. # # THE", "if sysconfig.get_config_var(\"Py_DEBUG\"): return True return hasattr(sys, \"gettotalrefcount\") def build_project(name, root_dir):", "as error: logger.warning(\"Numpy missing: %s\", error) else: logger.info(\"Numpy %s\", numpy.version.version)", "Exception as error: logger.warning(\"h5py missing: %s\", error) else: logger.info(\"h5py %s\",", "os.path.abspath(os.environ.get(\"BUILDPYTHONPATH\", \"\")) else: home = os.path.join(root_dir, \"build\", architecture) logger.warning(\"Building %s", "= p.communicate() logger.debug(\"subprocess ended with rc= %s\", p.returncode) return name.split()[-1].decode('ascii')", "python setup.py --name in root_dir. :param str root_dir: Directory where", "root_dir): \"\"\"Run python setup.py build for the project. 
Build directory", "in root_dir \"\"\" logger.debug(\"Getting project name in %s\", root_dir) p", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "error: logger.warning(\"Numpy missing: %s\", error) else: logger.info(\"Numpy %s\", numpy.version.version) try:", "except ImportError: # pragma nocover # Python < 2.7 import", "sell # copies of the Software, and to permit persons", "= project_module.__path__[0] def normalize_option(option): option_parts = option.split(os.path.sep) if option_parts ==", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "build_dir = build_project(project_name, project_dir) if build_dir is None: logging.error(\"Built project", "def import_project_module(project_name, project_dir): \"\"\"Import project module, from the system of", "Radiation Facility # # Permission is hereby granted, free of", "shell=False, cwd=root_dir) logger.debug(\"subprocess ended with rc= %s\", p.wait()) if os.path.isdir(home):", "Needed for multiprocessing support on Windows import pytest PROJECT_DIR =", "on PROJECT_PATH if nothing is specified without_options = [a for", "'version', '') PROJECT_PATH = project_module.__path__[0] def normalize_option(option): option_parts = option.split(os.path.sep)", "Copyright (c) 2015-2021 European Synchrotron Radiation Facility # # Permission", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN #", "or raising error.\", project_name) raise else: # Use built source", "publish, distribute, sublicense, and/or sell # copies of the Software,", "Exception: logger.error(\"Cannot run tests on installed version: %s not installed", "in args if not a.startswith(\"-\")] if len(without_options) == 0: args", "to the following conditions: # # The above copyright notice", "if len(without_options) == 0: args += [PROJECT_PATH] argv = [\"--rootdir\",", "from the project directory\"\"\" if \"--installed\" in sys.argv: try: module", "name, home) p = subprocess.Popen([sys.executable, 
\"setup.py\", \"build\"], shell=False, cwd=root_dir) logger.debug(\"subprocess", "%s\", name, home) p = subprocess.Popen([sys.executable, \"setup.py\", \"build\"], shell=False, cwd=root_dir)", "This script expects a suite function in <project_package>.test, which returns", "== name: # we are in the debian packaging way", "investigate\") sys.path.insert(0, build_dir) logger.warning(\"Patched sys.path, added: '%s'\", build_dir) module =", "USE OR OTHER DEALINGS IN # THE SOFTWARE. # #", "= get_project_name(PROJECT_DIR) logger.info(\"Project name: %s\", PROJECT_NAME) project_module = import_project_module(PROJECT_NAME, PROJECT_DIR)", "= [a for a in args if not a.startswith(\"-\")] if", "for a in args if not a.startswith(\"-\")] if len(without_options) ==", "error.\", project_name) raise else: # Use built source build_dir =", "modify, merge, publish, distribute, sublicense, and/or sell # copies of", "%s\", h5py.version.version) def get_project_name(root_dir): \"\"\"Retrieve project name by running python", "stdout=subprocess.PIPE) name, _stderr_data = p.communicate() logger.debug(\"subprocess ended with rc= %s\",", "OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "= logging.getLogger(\"run_tests\") logger.setLevel(logging.WARNING) logger.info(\"Python %s %s\", sys.version, tuple.__itemsize__ * 8)", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "cwd=root_dir, stdout=subprocess.PIPE) name, _stderr_data = p.communicate() logger.debug(\"subprocess ended with rc=", "Software, and to permit persons to whom the Software is", "directory were build was performed \"\"\" platform = distutils.util.get_platform() architecture", "sysconfig except ImportError: # pragma nocover # Python < 2.7", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "option.split(os.path.sep) if option_parts == [\"src\", \"silx\"]: return PROJECT_PATH if option_parts[:2]", "--name in root_dir. 
:param str root_dir: Directory where to run", "the project. Build directory can be modified by environment variables.", "= os.path.join(root_dir, \"build\", architecture) logger.warning(\"Building %s to %s\", name, home)", "home = os.path.join(root_dir, \"build\", architecture) logger.warning(\"Building %s to %s\", name,", "as sysconfig if sysconfig.get_config_var(\"Py_DEBUG\"): return True return hasattr(sys, \"gettotalrefcount\") def", "\"30/09/2020\" __license__ = \"MIT\" import distutils.util import logging import os", "alt_home = os.path.join(os.path.dirname(home), \"lib\") if os.path.isdir(alt_home): return alt_home def import_project_module(project_name,", "else: logger.info(\"h5py %s\", h5py.version.version) def get_project_name(root_dir): \"\"\"Retrieve project name by", "name: # we are in the debian packaging way home", "python setup.py build for the project. Build directory can be", "\"Software\"), to deal # in the Software without restriction, including", "in %s\", root_dir) p = subprocess.Popen([sys.executable, \"setup.py\", \"--name\"], shell=False, cwd=root_dir,", "home) p = subprocess.Popen([sys.executable, \"setup.py\", \"build\"], shell=False, cwd=root_dir) logger.debug(\"subprocess ended", "alt_home def import_project_module(project_name, project_dir): \"\"\"Import project module, from the system", "getattr(project_module, 'version', '') PROJECT_PATH = project_module.__path__[0] def normalize_option(option): option_parts =", "if os.environ.get(\"PYBUILD_NAME\") == name: # we are in the debian", "debian packaging way home = os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)[-1] elif os.environ.get(\"BUILDPYTHONPATH\"): home", "\"--installed\"] # Run test on PROJECT_PATH if nothing is specified", "p in sys.argv[1:] if p != \"--installed\"] # Run test", "= subprocess.Popen([sys.executable, \"setup.py\", \"build\"], shell=False, cwd=root_dir) logger.debug(\"subprocess ended with rc=", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", 
"logger.info(\"Numpy %s\", numpy.version.version) try: import h5py except Exception as error:", "logger = logging.getLogger(\"run_tests\") logger.setLevel(logging.WARNING) logger.info(\"Python %s %s\", sys.version, tuple.__itemsize__ *", "(platform, sys.version_info[0], sys.version_info[1]) if is_debug_python(): architecture += \"-pydebug\" if os.environ.get(\"PYBUILD_NAME\")", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "\"__main__\": # Needed for multiprocessing support on Windows import pytest", "build_dir is None: logging.error(\"Built project is not available !!! investigate\")", "def is_debug_python(): \"\"\"Returns true if the Python interpreter is in", "\"\"\" platform = distutils.util.get_platform() architecture = \"lib.%s-%i.%i\" % (platform, sys.version_info[0],", "option_parts == [\"src\", \"silx\"]: return PROJECT_PATH if option_parts[:2] == [\"src\",", "IN # THE SOFTWARE. # # ###########################################################################*/ \"\"\"Run the tests", "# copies of the Software, and to permit persons to", "which returns a unittest.TestSuite. Test coverage dependencies: coverage, lxml. \"\"\"", "SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE", "built source build_dir = build_project(project_name, project_dir) if build_dir is None:", "root_dir: Directory where to run the command. 
:return: The name", "name in %s\", root_dir) p = subprocess.Popen([sys.executable, \"setup.py\", \"--name\"], shell=False,", "project name in %s\", root_dir) p = subprocess.Popen([sys.executable, \"setup.py\", \"--name\"],", "if \"--installed\" in sys.argv: try: module = importlib.import_module(project_name) except Exception:", "all default warnings logging.captureWarnings(True) import warnings warnings.simplefilter('default') logger = logging.getLogger(\"run_tests\")", "granted, free of charge, to any person obtaining a copy", "== \"__main__\": # Needed for multiprocessing support on Windows import", "[\"src\", \"silx\"]: return os.path.join(PROJECT_PATH, *option_parts[2:]) return option args = [normalize_option(p)", "= subprocess.Popen([sys.executable, \"setup.py\", \"--name\"], shell=False, cwd=root_dir, stdout=subprocess.PIPE) name, _stderr_data =", "if is_debug_python(): architecture += \"-pydebug\" if os.environ.get(\"PYBUILD_NAME\") == name: #", "are in the debian packaging way home = os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)[-1]", "!= \"--installed\"] # Run test on PROJECT_PATH if nothing is", "build_project(project_name, project_dir) if build_dir is None: logging.error(\"Built project is not", "obtaining a copy # of this software and associated documentation", "_stderr_data = p.communicate() logger.debug(\"subprocess ended with rc= %s\", p.returncode) return", "module if __name__ == \"__main__\": # Needed for multiprocessing support", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "name, _stderr_data = p.communicate() logger.debug(\"subprocess ended with rc= %s\", p.returncode)", "is # furnished to do so, subject to the following", "to whom the Software is # furnished to do so,", "is None: logging.error(\"Built project is not available !!! 
investigate\") sys.path.insert(0,", "copy # of this software and associated documentation files (the", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "the debian packaging way home = os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)[-1] elif os.environ.get(\"BUILDPYTHONPATH\"):", "Permission is hereby granted, free of charge, to any person", "the system of from the project directory\"\"\" if \"--installed\" in", "by environment variables. :param str name: Name of the project.", "###########################################################################*/ \"\"\"Run the tests of the project. This script expects", "if os.path.isdir(home): return home alt_home = os.path.join(os.path.dirname(home), \"lib\") if os.path.isdir(alt_home):", "version: %s not installed or raising error.\", project_name) raise else:", "import sysconfig except ImportError: # pragma nocover # Python <", "= getattr(project_module, 'version', '') PROJECT_PATH = project_module.__path__[0] def normalize_option(option): option_parts", "import numpy except Exception as error: logger.warning(\"Numpy missing: %s\", error)", "not installed or raising error.\", project_name) raise else: # Use", "The above copyright notice and this permission notice shall be", "Root directory of the project :return: The path to the", "get_project_name(PROJECT_DIR) logger.info(\"Project name: %s\", PROJECT_NAME) project_module = import_project_module(PROJECT_NAME, PROJECT_DIR) PROJECT_VERSION", "logger.debug(\"subprocess ended with rc= %s\", p.wait()) if os.path.isdir(home): return home", "logger.debug(\"subprocess ended with rc= %s\", p.returncode) return name.split()[-1].decode('ascii') def is_debug_python():", "logger.info(\"Project name: %s\", PROJECT_NAME) project_module = import_project_module(PROJECT_NAME, PROJECT_DIR) PROJECT_VERSION =", "the project directory\"\"\" if \"--installed\" in sys.argv: try: module =", "subprocess.Popen([sys.executable, \"setup.py\", \"--name\"], shell=False, cwd=root_dir, 
stdout=subprocess.PIPE) name, _stderr_data = p.communicate()", "return hasattr(sys, \"gettotalrefcount\") def build_project(name, root_dir): \"\"\"Run python setup.py build", "rc= %s\", p.wait()) if os.path.isdir(home): return home alt_home = os.path.join(os.path.dirname(home),", "coverage dependencies: coverage, lxml. \"\"\" __authors__ = [\"<NAME>\", \"<NAME>\"] __date__", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "tuple.__itemsize__ * 8) try: import numpy except Exception as error:", "Use built source build_dir = build_project(project_name, project_dir) if build_dir is", "PROJECT_VERSION = getattr(project_module, 'version', '') PROJECT_PATH = project_module.__path__[0] def normalize_option(option):", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "# Capture all default warnings logging.captureWarnings(True) import warnings warnings.simplefilter('default') logger", "name by running python setup.py --name in root_dir. :param str", "coverage, lxml. \"\"\" __authors__ = [\"<NAME>\", \"<NAME>\"] __date__ = \"30/09/2020\"", "= [normalize_option(p) for p in sys.argv[1:] if p != \"--installed\"]", "to permit persons to whom the Software is # furnished", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING" ]
[ "self._format_message(name, unic(self.error)) def _get_details(self): if isinstance(self.error, RobotError): return self.error.details return", "details = PythonErrorDetails \\ if not isinstance(exc_value, Throwable) else JavaErrorDetails", "2.0 (the \"License\"); # you may not use this file", "entry in self._ignored_java_trace: if location.startswith(entry): return True return False def", "Nokia Solutions and Networks # # Licensed under the Apache", "message, name): return message class PythonErrorDetails(_ErrorDetails): def _get_message(self): name =", "if JYTHON: from java.io import StringWriter, PrintWriter from java.lang import", "self._traceback def _get_details(self): raise NotImplementedError def _get_name(self, exc_type): try: return", "''.join(traceback.format_tb(tb)).rstrip() or ' None' def _is_excluded_traceback(self, traceback): if not self._exclude_robot_traces:", "if self._traceback is None: self._traceback = self._get_details() return self._traceback def", "details = '\\n'.join(line for line in output.toString().splitlines() if not self._is_ignored_stack_trace_line(line))", "not self._is_out_of_memory_error(self._exc_type): exc_msg = self.error.getMessage() else: exc_msg = str(self.error) return", "False module = traceback.tb_frame.f_globals.get('__name__') return module and module.startswith('robot.') class JavaErrorDetails(_ErrorDetails):", "raise NotImplementedError @property def traceback(self): if self._traceback is None: self._traceback", "or ' None' def _is_excluded_traceback(self, traceback): if not self._exclude_robot_traces: return", "line): if not line: return True res = self._java_trace_re.match(line) if", "def _get_message(self): raise NotImplementedError @property def traceback(self): if self._traceback is", "from java.lang import Throwable, OutOfMemoryError else: Throwable = () def", "and `error`, where `message` contains type and message of the", "self._clean_up_message(message, name) name = name.split('.')[-1] # Use only last 
part", "OutOfMemoryError def _get_details(self): # OOME.printStackTrace seems to throw NullPointerException if", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "= name.split('.')[-1] # Use only last part of the name", "def traceback(self): if self._traceback is None: self._traceback = self._get_details() return", "= exc_value self._exc_type = exc_type self._exc_traceback = exc_traceback self._exclude_robot_traces =", "last part of the name if not message: return name", "traceback/stack trace and `error` contains the original error instance. \"\"\"", "the original error, `traceback` contains the traceback/stack trace and `error`", "is None: self._message = self._get_message() return self._message def _get_message(self): raise", "if not self._exclude_robot_traces: return False module = traceback.tb_frame.f_globals.get('__name__') return module", "details.replace(msg, '', 1) return details def _is_ignored_stack_trace_line(self, line): if not", "= exclude_robot_traces self._message = None self._traceback = None @property def", "NotImplementedError def _get_name(self, exc_type): try: return exc_type.__name__ except AttributeError: return", "= unic(message or '') message = self._clean_up_message(message, name) name =", "`traceback` and `error`, where `message` contains type and message of", "self._traceback = self._get_details() return self._traceback def _get_details(self): raise NotImplementedError def", "name): return message class PythonErrorDetails(_ErrorDetails): def _get_message(self): name = self._get_name(self._exc_type)", "the name if not message: return name if self._is_generic_exception(name): return", "def _is_ignored_stack_trace_line(self, line): if not line: return True res =", "EXCLUDE_ROBOT_TRACES = not os.getenv('ROBOT_INTERNAL_TRACES') if JYTHON: from java.io import StringWriter,", "self._get_details() return self._traceback def _get_details(self): raise NotImplementedError def _get_name(self, exc_type):", 
"getattr(self.error, 'ROBOT_SUPPRESS_NAME', False)) def _clean_up_message(self, message, name): return message class", "factory returns an object that wraps the last occurred exception", "use this file except in compliance with the License. #", "traceback): if not self._exclude_robot_traces: return False module = traceback.tb_frame.f_globals.get('__name__') return", "details = ErrorDetails(exclude_robot_traces=exclude_robot_traces) return details.message, details.traceback def ErrorDetails(exc_info=None, exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"This", "1) return details def _is_ignored_stack_trace_line(self, line): if not line: return", "if self._java_trace_re.match(lines[-1]): lines.pop() else: break return '\\n'.join(lines) def _remove_exception_name(self, msg,", "return ErrorDetails().message def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"Returns error message and details of", "def __init__(self, exc_type, exc_value, exc_traceback, exclude_robot_traces=True): self.error = exc_value self._exc_type", "res = self._java_trace_re.match(line) if res is None: return False location", "None self._traceback = None @property def message(self): if self._message is", "self._traceback is None: self._traceback = self._get_details() return self._traceback def _get_details(self):", "'') message = self._clean_up_message(message, name) name = name.split('.')[-1] # Use", "'%s: %s' % (name, message) def _is_generic_exception(self, name): return (name", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "OutOfMemoryError else: Throwable = () def get_error_message(): \"\"\"Returns error message", "RERAISED_EXCEPTIONS: raise exc_value details = PythonErrorDetails \\ if not isinstance(exc_value,", "name.split('.')[-1] # Use only last part of the name if", "License. 
# You may obtain a copy of the License", "self._format_message(exc_name, exc_msg) def _is_out_of_memory_error(self, exc_type): return exc_type is OutOfMemoryError def", "2008-2015 Nokia Solutions and Networks # # Licensed under the", "last):\\n' + self._get_traceback() def _get_traceback(self): tb = self._exc_traceback while tb", "line: return True res = self._java_trace_re.match(line) if res is None:", "exc_name = self._get_name(self._exc_type) # OOME.getMessage and even toString seem to", "os import re import sys import traceback from robot.errors import", "= details.replace(msg, '', 1) return details def _is_ignored_stack_trace_line(self, line): if", "under the License is distributed on an \"AS IS\" BASIS,", "the original error instance. \"\"\" exc_type, exc_value, exc_traceback = exc_info", "Use only last part of the name if not message:", "License for the specific language governing permissions and # limitations", "res.group(1) for entry in self._ignored_java_trace: if location.startswith(entry): return True return", "def _remove_stack_trace_lines(self, msg): lines = msg.splitlines() while lines: if self._java_trace_re.match(lines[-1]):", "Throwable, OutOfMemoryError else: Throwable = () def get_error_message(): \"\"\"Returns error", "= self._clean_up_message(message, name) name = name.split('.')[-1] # Use only last", "PrintWriter from java.lang import Throwable, OutOfMemoryError else: Throwable = ()", "if self._message is None: self._message = self._get_message() return self._message def", "self.error.details return 'Traceback (most recent call last):\\n' + self._get_traceback() def", "return details def _is_ignored_stack_trace_line(self, line): if not line: return True", "if not message: return name if self._is_generic_exception(name): return message return", ".platform import JYTHON, RERAISED_EXCEPTIONS from .unic import unic EXCLUDE_ROBOT_TRACES =", "JYTHON: from java.io import StringWriter, PrintWriter from java.lang import Throwable,", "= 
re.compile('^\\s+at (\\w.+)') _ignored_java_trace = ('org.python.', 'robot.running.', 'robot$py.', 'sun.reflect.', 'java.lang.reflect.')", "get messages from all exceptions originating outside the framework. \"\"\"", "class JavaErrorDetails(_ErrorDetails): _java_trace_re = re.compile('^\\s+at (\\w.+)') _ignored_java_trace = ('org.python.', 'robot.running.',", "in compliance with the License. # You may obtain a", "'\\n'.join(lines) def _remove_exception_name(self, msg, name): tokens = msg.split(':', 1) if", "software # distributed under the License is distributed on an", "ErrorDetails(exclude_robot_traces=exclude_robot_traces) return details.message, details.traceback def ErrorDetails(exc_info=None, exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"This factory returns", "AttributeError: return unic(exc_type) def _format_message(self, name, message): message = unic(message", "RobotError) or getattr(self.error, 'ROBOT_SUPPRESS_NAME', False)) def _clean_up_message(self, message, name): return", "message = unic(message or '') message = self._clean_up_message(message, name) name", "== 2 and tokens[0] == name: msg = tokens[1] return", "details def _is_ignored_stack_trace_line(self, line): if not line: return True res", "Throwable) else JavaErrorDetails return details(exc_type, exc_value, exc_traceback, exclude_robot_traces) class _ErrorDetails(object):", "import RobotError from .platform import JYTHON, RERAISED_EXCEPTIONS from .unic import", "'RuntimeException') def __init__(self, exc_type, exc_value, exc_traceback, exclude_robot_traces=True): self.error = exc_value", "return True return False def _clean_up_message(self, msg, name): msg =", "def _format_message(self, name, message): message = unic(message or '') message", "return self._message def _get_message(self): raise NotImplementedError @property def traceback(self): if", "where `message` contains type and message of the original error,", "original error, `traceback` contains the traceback/stack trace and `error` 
contains", "# Copyright 2008-2015 Nokia Solutions and Networks # # Licensed", "= None self._traceback = None @property def message(self): if self._message", "exc_type.__name__ except AttributeError: return unic(exc_type) def _format_message(self, name, message): message", "(most recent call last):\\n' + self._get_traceback() def _get_traceback(self): tb =", "_clean_up_message(self, msg, name): msg = self._remove_stack_trace_lines(msg) return self._remove_exception_name(msg, name).strip() def", "= exc_info or sys.exc_info() if exc_type in RERAISED_EXCEPTIONS: raise exc_value", "return True res = self._java_trace_re.match(line) if res is None: return", "@property def traceback(self): if self._traceback is None: self._traceback = self._get_details()", "def _get_message(self): name = self._get_name(self._exc_type) return self._format_message(name, unic(self.error)) def _get_details(self):", "import StringWriter, PrintWriter from java.lang import Throwable, OutOfMemoryError else: Throwable", "in self._ignored_java_trace: if location.startswith(entry): return True return False def _clean_up_message(self,", "except AttributeError: return unic(exc_type) def _format_message(self, name, message): message =", "lines.pop() else: break return '\\n'.join(lines) def _remove_exception_name(self, msg, name): tokens", "if len(tokens) == 2 and tokens[0] == name: msg =", "def _get_message(self): exc_name = self._get_name(self._exc_type) # OOME.getMessage and even toString", "return unic(exc_type) def _format_message(self, name, message): message = unic(message or", "if not line: return True res = self._java_trace_re.match(line) if res", "'') if msg: details = details.replace(msg, '', 1) return details", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. 
# See the License", "See the License for the specific language governing permissions and", "robot.errors import RobotError from .platform import JYTHON, RERAISED_EXCEPTIONS from .unic", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "message class PythonErrorDetails(_ErrorDetails): def _get_message(self): name = self._get_name(self._exc_type) return self._format_message(name,", "to in writing, software # distributed under the License is", "output = StringWriter() self.error.printStackTrace(PrintWriter(output)) details = '\\n'.join(line for line in", "License. import os import re import sys import traceback from", "return name if self._is_generic_exception(name): return message return '%s: %s' %", "self._java_trace_re.match(line) if res is None: return False location = res.group(1)", "# See the License for the specific language governing permissions", "'RuntimeError', 'RuntimeException') def __init__(self, exc_type, exc_value, exc_traceback, exclude_robot_traces=True): self.error =", "= self._remove_stack_trace_lines(msg) return self._remove_exception_name(msg, name).strip() def _remove_stack_trace_lines(self, msg): lines =", "or agreed to in writing, software # distributed under the", "self._is_out_of_memory_error(self._exc_type): return '' output = StringWriter() self.error.printStackTrace(PrintWriter(output)) details = '\\n'.join(line", "exc_info or sys.exc_info() if exc_type in RERAISED_EXCEPTIONS: raise exc_value details", "required by applicable law or agreed to in writing, software", "Throwable = () def get_error_message(): \"\"\"Returns error message of the", "def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"Returns error message and details of the last", "@property def message(self): if self._message is None: self._message = self._get_message()", "contains the original error 
instance. \"\"\" exc_type, exc_value, exc_traceback =", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "self._message is None: self._message = self._get_message() return self._message def _get_message(self):", "with the License. # You may obtain a copy of", "in output.toString().splitlines() if not self._is_ignored_stack_trace_line(line)) msg = unic(self.error.getMessage() or '')", "msg = self._remove_stack_trace_lines(msg) return self._remove_exception_name(msg, name).strip() def _remove_stack_trace_lines(self, msg): lines", "module.startswith('robot.') class JavaErrorDetails(_ErrorDetails): _java_trace_re = re.compile('^\\s+at (\\w.+)') _ignored_java_trace = ('org.python.',", "original error instance. \"\"\" exc_type, exc_value, exc_traceback = exc_info or", "exc_type, exc_value, exc_traceback = exc_info or sys.exc_info() if exc_type in", "lines: if self._java_trace_re.match(lines[-1]): lines.pop() else: break return '\\n'.join(lines) def _remove_exception_name(self,", "self._exclude_robot_traces = exclude_robot_traces self._message = None self._traceback = None @property", "NotImplementedError @property def traceback(self): if self._traceback is None: self._traceback =", "NullPointerException if self._is_out_of_memory_error(self._exc_type): return '' output = StringWriter() self.error.printStackTrace(PrintWriter(output)) details", "= msg.split(':', 1) if len(tokens) == 2 and tokens[0] ==", "('AssertionError', 'AssertionFailedError', 'Exception', 'Error', 'RuntimeError', 'RuntimeException') def __init__(self, exc_type, exc_value,", "'' output = StringWriter() self.error.printStackTrace(PrintWriter(output)) details = '\\n'.join(line for line", "return False def _clean_up_message(self, msg, name): msg = self._remove_stack_trace_lines(msg) return", "if not self._is_ignored_stack_trace_line(line)) msg = unic(self.error.getMessage() or '') if msg:", "compliance with the License. 
# You may obtain a copy", "agreed to in writing, software # distributed under the License", "_generic_exception_names = ('AssertionError', 'AssertionFailedError', 'Exception', 'Error', 'RuntimeError', 'RuntimeException') def __init__(self,", "str(self.error) return self._format_message(exc_name, exc_msg) def _is_out_of_memory_error(self, exc_type): return exc_type is", "True return False def _clean_up_message(self, msg, name): msg = self._remove_stack_trace_lines(msg)", "distributed under the License is distributed on an \"AS IS\"", "= unic(self.error.getMessage() or '') if msg: details = details.replace(msg, '',", "if location.startswith(entry): return True return False def _clean_up_message(self, msg, name):", "lines = msg.splitlines() while lines: if self._java_trace_re.match(lines[-1]): lines.pop() else: break", "= exc_traceback self._exclude_robot_traces = exclude_robot_traces self._message = None self._traceback =", "def _remove_exception_name(self, msg, name): tokens = msg.split(':', 1) if len(tokens)", "throw NullPointerException if self._is_out_of_memory_error(self._exc_type): return '' output = StringWriter() self.error.printStackTrace(PrintWriter(output))", "name, message): message = unic(message or '') message = self._clean_up_message(message,", "express or implied. # See the License for the specific", "isinstance(exc_value, Throwable) else JavaErrorDetails return details(exc_type, exc_value, exc_traceback, exclude_robot_traces) class", "except in compliance with the License. # You may obtain", "the License. 
import os import re import sys import traceback", "None: self._message = self._get_message() return self._message def _get_message(self): raise NotImplementedError", "message: return name if self._is_generic_exception(name): return message return '%s: %s'", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "# OOME.printStackTrace seems to throw NullPointerException if self._is_out_of_memory_error(self._exc_type): return ''", "not use this file except in compliance with the License.", "= self._get_name(self._exc_type) return self._format_message(name, unic(self.error)) def _get_details(self): if isinstance(self.error, RobotError):", "from .unic import unic EXCLUDE_ROBOT_TRACES = not os.getenv('ROBOT_INTERNAL_TRACES') if JYTHON:", "writing, software # distributed under the License is distributed on", "an object that wraps the last occurred exception It has", "_get_message(self): name = self._get_name(self._exc_type) return self._format_message(name, unic(self.error)) def _get_details(self): if", "msg = unic(self.error.getMessage() or '') if msg: details = details.replace(msg,", "you may not use this file except in compliance with", "not os.getenv('ROBOT_INTERNAL_TRACES') if JYTHON: from java.io import StringWriter, PrintWriter from", "else JavaErrorDetails return details(exc_type, exc_value, exc_traceback, exclude_robot_traces) class _ErrorDetails(object): _generic_exception_names", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "and self._is_excluded_traceback(tb): tb = tb.tb_next return ''.join(traceback.format_tb(tb)).rstrip() or ' None'", "exception It has attributes `message`, `traceback` and `error`, where `message`", "and Networks # # Licensed under the Apache License, Version", "occurred exception.\"\"\" details = ErrorDetails(exclude_robot_traces=exclude_robot_traces) return details.message, details.traceback def ErrorDetails(exc_info=None,", "self._is_generic_exception(name): return message return '%s: %s' % 
(name, message) def", "def _get_name(self, exc_type): try: return exc_type.__name__ except AttributeError: return unic(exc_type)", "CONDITIONS OF ANY KIND, either express or implied. # See", "permissions and # limitations under the License. import os import", "error, `traceback` contains the traceback/stack trace and `error` contains the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "JavaErrorDetails return details(exc_type, exc_value, exc_traceback, exclude_robot_traces) class _ErrorDetails(object): _generic_exception_names =", "return details(exc_type, exc_value, exc_traceback, exclude_robot_traces) class _ErrorDetails(object): _generic_exception_names = ('AssertionError',", "get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"Returns error message and details of the last occurred", "return '' output = StringWriter() self.error.printStackTrace(PrintWriter(output)) details = '\\n'.join(line for", "exc_type self._exc_traceback = exc_traceback self._exclude_robot_traces = exclude_robot_traces self._message = None", "error instance. \"\"\" exc_type, exc_value, exc_traceback = exc_info or sys.exc_info()", "the traceback/stack trace and `error` contains the original error instance.", "from all exceptions originating outside the framework. 
\"\"\" return ErrorDetails().message", "None: return False location = res.group(1) for entry in self._ignored_java_trace:", "while lines: if self._java_trace_re.match(lines[-1]): lines.pop() else: break return '\\n'.join(lines) def", "or isinstance(self.error, RobotError) or getattr(self.error, 'ROBOT_SUPPRESS_NAME', False)) def _clean_up_message(self, message,", "not self._is_ignored_stack_trace_line(line)) msg = unic(self.error.getMessage() or '') if msg: details", "_is_excluded_traceback(self, traceback): if not self._exclude_robot_traces: return False module = traceback.tb_frame.f_globals.get('__name__')", "%s' % (name, message) def _is_generic_exception(self, name): return (name in", "not message: return name if self._is_generic_exception(name): return message return '%s:", "raise exc_value details = PythonErrorDetails \\ if not isinstance(exc_value, Throwable)", "handles also exceptions containing unicode messages. Thus it MUST be", "= res.group(1) for entry in self._ignored_java_trace: if location.startswith(entry): return True", "self._ignored_java_trace: if location.startswith(entry): return True return False def _clean_up_message(self, msg,", "module and module.startswith('robot.') class JavaErrorDetails(_ErrorDetails): _java_trace_re = re.compile('^\\s+at (\\w.+)') _ignored_java_trace", "exc_msg) def _is_out_of_memory_error(self, exc_type): return exc_type is OutOfMemoryError def _get_details(self):", "`error`, where `message` contains type and message of the original", "in self._generic_exception_names or isinstance(self.error, RobotError) or getattr(self.error, 'ROBOT_SUPPRESS_NAME', False)) def", "OR CONDITIONS OF ANY KIND, either express or implied. #", "governing permissions and # limitations under the License. import os", "exception. This method handles also exceptions containing unicode messages. 
Thus", "'Error', 'RuntimeError', 'RuntimeException') def __init__(self, exc_type, exc_value, exc_traceback, exclude_robot_traces=True): self.error", "the License is distributed on an \"AS IS\" BASIS, #", "return ''.join(traceback.format_tb(tb)).rstrip() or ' None' def _is_excluded_traceback(self, traceback): if not", "return exc_type.__name__ except AttributeError: return unic(exc_type) def _format_message(self, name, message):", "return module and module.startswith('robot.') class JavaErrorDetails(_ErrorDetails): _java_trace_re = re.compile('^\\s+at (\\w.+)')", "is None: return False location = res.group(1) for entry in", "\"\"\"Returns error message of the last occurred exception. This method", "occurred exception It has attributes `message`, `traceback` and `error`, where", "unic(message or '') message = self._clean_up_message(message, name) name = name.split('.')[-1]", "self._remove_exception_name(msg, name).strip() def _remove_stack_trace_lines(self, msg): lines = msg.splitlines() while lines:", "None' def _is_excluded_traceback(self, traceback): if not self._exclude_robot_traces: return False module", "even toString seem to throw NullPointerException if not self._is_out_of_memory_error(self._exc_type): exc_msg", "exclude_robot_traces self._message = None self._traceback = None @property def message(self):", "unic(exc_type) def _format_message(self, name, message): message = unic(message or '')", "self._exclude_robot_traces: return False module = traceback.tb_frame.f_globals.get('__name__') return module and module.startswith('robot.')", "self._java_trace_re.match(lines[-1]): lines.pop() else: break return '\\n'.join(lines) def _remove_exception_name(self, msg, name):", "not isinstance(exc_value, Throwable) else JavaErrorDetails return details(exc_type, exc_value, exc_traceback, exclude_robot_traces)", "of the name if not message: return name if self._is_generic_exception(name):", "if self._is_out_of_memory_error(self._exc_type): return '' output = 
StringWriter() self.error.printStackTrace(PrintWriter(output)) details =", "return '\\n'.join(lines) def _remove_exception_name(self, msg, name): tokens = msg.split(':', 1)", "last occurred exception.\"\"\" details = ErrorDetails(exclude_robot_traces=exclude_robot_traces) return details.message, details.traceback def", "else: exc_msg = str(self.error) return self._format_message(exc_name, exc_msg) def _is_out_of_memory_error(self, exc_type):", "law or agreed to in writing, software # distributed under", "res is None: return False location = res.group(1) for entry", "line in output.toString().splitlines() if not self._is_ignored_stack_trace_line(line)) msg = unic(self.error.getMessage() or", "to get messages from all exceptions originating outside the framework.", "is None: self._traceback = self._get_details() return self._traceback def _get_details(self): raise", "StringWriter() self.error.printStackTrace(PrintWriter(output)) details = '\\n'.join(line for line in output.toString().splitlines() if", "= () def get_error_message(): \"\"\"Returns error message of the last", "java.io import StringWriter, PrintWriter from java.lang import Throwable, OutOfMemoryError else:", "def _is_excluded_traceback(self, traceback): if not self._exclude_robot_traces: return False module =", "call last):\\n' + self._get_traceback() def _get_traceback(self): tb = self._exc_traceback while", "# OOME.getMessage and even toString seem to throw NullPointerException if", "NullPointerException if not self._is_out_of_memory_error(self._exc_type): exc_msg = self.error.getMessage() else: exc_msg =", ".unic import unic EXCLUDE_ROBOT_TRACES = not os.getenv('ROBOT_INTERNAL_TRACES') if JYTHON: from", "= None @property def message(self): if self._message is None: self._message", "return message class PythonErrorDetails(_ErrorDetails): def _get_message(self): name = self._get_name(self._exc_type) return", "= self.error.getMessage() else: exc_msg = str(self.error) return self._format_message(exc_name, 
exc_msg) def", "def _get_traceback(self): tb = self._exc_traceback while tb and self._is_excluded_traceback(tb): tb", "contains the traceback/stack trace and `error` contains the original error", "re.compile('^\\s+at (\\w.+)') _ignored_java_trace = ('org.python.', 'robot.running.', 'robot$py.', 'sun.reflect.', 'java.lang.reflect.') def", "RERAISED_EXCEPTIONS from .unic import unic EXCLUDE_ROBOT_TRACES = not os.getenv('ROBOT_INTERNAL_TRACES') if", "error message and details of the last occurred exception.\"\"\" details", "def _get_details(self): raise NotImplementedError def _get_name(self, exc_type): try: return exc_type.__name__", "self._is_ignored_stack_trace_line(line)) msg = unic(self.error.getMessage() or '') if msg: details =", "throw NullPointerException if not self._is_out_of_memory_error(self._exc_type): exc_msg = self.error.getMessage() else: exc_msg", "msg: details = details.replace(msg, '', 1) return details def _is_ignored_stack_trace_line(self,", "may obtain a copy of the License at # #", "exc_value, exc_traceback, exclude_robot_traces) class _ErrorDetails(object): _generic_exception_names = ('AssertionError', 'AssertionFailedError', 'Exception',", "or sys.exc_info() if exc_type in RERAISED_EXCEPTIONS: raise exc_value details =", "(name, message) def _is_generic_exception(self, name): return (name in self._generic_exception_names or", "unic(self.error.getMessage() or '') if msg: details = details.replace(msg, '', 1)", "if res is None: return False location = res.group(1) for", "False)) def _clean_up_message(self, message, name): return message class PythonErrorDetails(_ErrorDetails): def", "1) if len(tokens) == 2 and tokens[0] == name: msg", "originating outside the framework. \"\"\" return ErrorDetails().message def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"Returns", "`message`, `traceback` and `error`, where `message` contains type and message", "and `error` contains the original error instance. 
\"\"\" exc_type, exc_value,", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "the last occurred exception It has attributes `message`, `traceback` and", "instance. \"\"\" exc_type, exc_value, exc_traceback = exc_info or sys.exc_info() if", "message): message = unic(message or '') message = self._clean_up_message(message, name)", "exc_traceback = exc_info or sys.exc_info() if exc_type in RERAISED_EXCEPTIONS: raise", "may not use this file except in compliance with the", "'ROBOT_SUPPRESS_NAME', False)) def _clean_up_message(self, message, name): return message class PythonErrorDetails(_ErrorDetails):", "None: self._traceback = self._get_details() return self._traceback def _get_details(self): raise NotImplementedError", "of the original error, `traceback` contains the traceback/stack trace and", "self._exc_type = exc_type self._exc_traceback = exc_traceback self._exclude_robot_traces = exclude_robot_traces self._message", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "recent call last):\\n' + self._get_traceback() def _get_traceback(self): tb = self._exc_traceback", "this file except in compliance with the License. # You", "False location = res.group(1) for entry in self._ignored_java_trace: if location.startswith(entry):", "_get_details(self): if isinstance(self.error, RobotError): return self.error.details return 'Traceback (most recent", "unic(self.error)) def _get_details(self): if isinstance(self.error, RobotError): return self.error.details return 'Traceback", "only last part of the name if not message: return", "# limitations under the License. 
import os import re import", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "that wraps the last occurred exception It has attributes `message`,", "_get_message(self): raise NotImplementedError @property def traceback(self): if self._traceback is None:", "(name in self._generic_exception_names or isinstance(self.error, RobotError) or getattr(self.error, 'ROBOT_SUPPRESS_NAME', False))", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "% (name, message) def _is_generic_exception(self, name): return (name in self._generic_exception_names", "seems to throw NullPointerException if self._is_out_of_memory_error(self._exc_type): return '' output =", "= ErrorDetails(exclude_robot_traces=exclude_robot_traces) return details.message, details.traceback def ErrorDetails(exc_info=None, exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"This factory", "under the License. 
import os import re import sys import", "JYTHON, RERAISED_EXCEPTIONS from .unic import unic EXCLUDE_ROBOT_TRACES = not os.getenv('ROBOT_INTERNAL_TRACES')", "details(exc_type, exc_value, exc_traceback, exclude_robot_traces) class _ErrorDetails(object): _generic_exception_names = ('AssertionError', 'AssertionFailedError',", "exc_msg = str(self.error) return self._format_message(exc_name, exc_msg) def _is_out_of_memory_error(self, exc_type): return", "_format_message(self, name, message): message = unic(message or '') message =", "_is_out_of_memory_error(self, exc_type): return exc_type is OutOfMemoryError def _get_details(self): # OOME.printStackTrace", "is OutOfMemoryError def _get_details(self): # OOME.printStackTrace seems to throw NullPointerException", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "details = details.replace(msg, '', 1) return details def _is_ignored_stack_trace_line(self, line):", "break return '\\n'.join(lines) def _remove_exception_name(self, msg, name): tokens = msg.split(':',", "(\\w.+)') _ignored_java_trace = ('org.python.', 'robot.running.', 'robot$py.', 'sun.reflect.', 'java.lang.reflect.') def _get_message(self):", "name) name = name.split('.')[-1] # Use only last part of", "self._exc_traceback = exc_traceback self._exclude_robot_traces = exclude_robot_traces self._message = None self._traceback", "message return '%s: %s' % (name, message) def _is_generic_exception(self, name):", "self._generic_exception_names or isinstance(self.error, RobotError) or getattr(self.error, 'ROBOT_SUPPRESS_NAME', False)) def _clean_up_message(self,", "or getattr(self.error, 'ROBOT_SUPPRESS_NAME', False)) def _clean_up_message(self, message, name): return message", "_ignored_java_trace = ('org.python.', 'robot.running.', 'robot$py.', 'sun.reflect.', 'java.lang.reflect.') def _get_message(self): exc_name", "import os import re import sys import traceback from robot.errors", "'\\n'.join(line for line in output.toString().splitlines() if not 
self._is_ignored_stack_trace_line(line)) msg =", "location = res.group(1) for entry in self._ignored_java_trace: if location.startswith(entry): return", "\\ if not isinstance(exc_value, Throwable) else JavaErrorDetails return details(exc_type, exc_value,", "def _get_details(self): # OOME.printStackTrace seems to throw NullPointerException if self._is_out_of_memory_error(self._exc_type):", "RobotError): return self.error.details return 'Traceback (most recent call last):\\n' +", "message = self._clean_up_message(message, name) name = name.split('.')[-1] # Use only", "+ self._get_traceback() def _get_traceback(self): tb = self._exc_traceback while tb and", "self.error.getMessage() else: exc_msg = str(self.error) return self._format_message(exc_name, exc_msg) def _is_out_of_memory_error(self,", "language governing permissions and # limitations under the License. import", "def _clean_up_message(self, msg, name): msg = self._remove_stack_trace_lines(msg) return self._remove_exception_name(msg, name).strip()", "return (name in self._generic_exception_names or isinstance(self.error, RobotError) or getattr(self.error, 'ROBOT_SUPPRESS_NAME',", "Copyright 2008-2015 Nokia Solutions and Networks # # Licensed under", "import JYTHON, RERAISED_EXCEPTIONS from .unic import unic EXCLUDE_ROBOT_TRACES = not", "name): msg = self._remove_stack_trace_lines(msg) return self._remove_exception_name(msg, name).strip() def _remove_stack_trace_lines(self, msg):", "occurred exception. 
This method handles also exceptions containing unicode messages.", "= '\\n'.join(line for line in output.toString().splitlines() if not self._is_ignored_stack_trace_line(line)) msg", "msg): lines = msg.splitlines() while lines: if self._java_trace_re.match(lines[-1]): lines.pop() else:", "sys.exc_info() if exc_type in RERAISED_EXCEPTIONS: raise exc_value details = PythonErrorDetails", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "self._message = self._get_message() return self._message def _get_message(self): raise NotImplementedError @property", "\"\"\" return ErrorDetails().message def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"Returns error message and details", "or implied. # See the License for the specific language", "os.getenv('ROBOT_INTERNAL_TRACES') if JYTHON: from java.io import StringWriter, PrintWriter from java.lang", "_remove_exception_name(self, msg, name): tokens = msg.split(':', 1) if len(tokens) ==", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "traceback from robot.errors import RobotError from .platform import JYTHON, RERAISED_EXCEPTIONS", "self._message def _get_message(self): raise NotImplementedError @property def traceback(self): if self._traceback", "or '') if msg: details = details.replace(msg, '', 1) return", "class _ErrorDetails(object): _generic_exception_names = ('AssertionError', 'AssertionFailedError', 'Exception', 'Error', 'RuntimeError', 'RuntimeException')", "exc_traceback, exclude_robot_traces) class _ErrorDetails(object): _generic_exception_names = ('AssertionError', 'AssertionFailedError', 'Exception', 'Error',", "self.error = exc_value self._exc_type = exc_type self._exc_traceback = exc_traceback self._exclude_robot_traces", "() def get_error_message(): \"\"\"Returns error message of the last occurred", "Thus it MUST be used to get messages from all", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "tb = tb.tb_next return ''.join(traceback.format_tb(tb)).rstrip() or ' None' def _is_excluded_traceback(self,", "error message of the last occurred exception. 
This method handles", "else: break return '\\n'.join(lines) def _remove_exception_name(self, msg, name): tokens =", "of the last occurred exception.\"\"\" details = ErrorDetails(exclude_robot_traces=exclude_robot_traces) return details.message,", "self._get_name(self._exc_type) # OOME.getMessage and even toString seem to throw NullPointerException", "= exc_type self._exc_traceback = exc_traceback self._exclude_robot_traces = exclude_robot_traces self._message =", "return self._format_message(name, unic(self.error)) def _get_details(self): if isinstance(self.error, RobotError): return self.error.details", "self._get_name(self._exc_type) return self._format_message(name, unic(self.error)) def _get_details(self): if isinstance(self.error, RobotError): return", "= tb.tb_next return ''.join(traceback.format_tb(tb)).rstrip() or ' None' def _is_excluded_traceback(self, traceback):", "import traceback from robot.errors import RobotError from .platform import JYTHON,", "name): tokens = msg.split(':', 1) if len(tokens) == 2 and", "be used to get messages from all exceptions originating outside", "details.traceback def ErrorDetails(exc_info=None, exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"This factory returns an object that", "(the \"License\"); # you may not use this file except", "self.error.printStackTrace(PrintWriter(output)) details = '\\n'.join(line for line in output.toString().splitlines() if not", "exc_msg = self.error.getMessage() else: exc_msg = str(self.error) return self._format_message(exc_name, exc_msg)", "# you may not use this file except in compliance", "_is_ignored_stack_trace_line(self, line): if not line: return True res = self._java_trace_re.match(line)", "returns an object that wraps the last occurred exception It", "if exc_type in RERAISED_EXCEPTIONS: raise exc_value details = PythonErrorDetails \\", "\"\"\"Returns error message and details of the last occurred exception.\"\"\"", "= StringWriter() self.error.printStackTrace(PrintWriter(output)) 
details = '\\n'.join(line for line in output.toString().splitlines()", "exceptions originating outside the framework. \"\"\" return ErrorDetails().message def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES):", "PythonErrorDetails \\ if not isinstance(exc_value, Throwable) else JavaErrorDetails return details(exc_type,", "ErrorDetails().message def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"Returns error message and details of the", "_get_details(self): raise NotImplementedError def _get_name(self, exc_type): try: return exc_type.__name__ except", "return message return '%s: %s' % (name, message) def _is_generic_exception(self,", "exc_value self._exc_type = exc_type self._exc_traceback = exc_traceback self._exclude_robot_traces = exclude_robot_traces", "return 'Traceback (most recent call last):\\n' + self._get_traceback() def _get_traceback(self):", "from java.io import StringWriter, PrintWriter from java.lang import Throwable, OutOfMemoryError", "# # Unless required by applicable law or agreed to", "_get_details(self): # OOME.printStackTrace seems to throw NullPointerException if self._is_out_of_memory_error(self._exc_type): return", "# Use only last part of the name if not", "tb.tb_next return ''.join(traceback.format_tb(tb)).rstrip() or ' None' def _is_excluded_traceback(self, traceback): if", "msg, name): msg = self._remove_stack_trace_lines(msg) return self._remove_exception_name(msg, name).strip() def _remove_stack_trace_lines(self,", "framework. \"\"\" return ErrorDetails().message def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"Returns error message and", "get_error_message(): \"\"\"Returns error message of the last occurred exception. 
This", "exclude_robot_traces) class _ErrorDetails(object): _generic_exception_names = ('AssertionError', 'AssertionFailedError', 'Exception', 'Error', 'RuntimeError',", "'Traceback (most recent call last):\\n' + self._get_traceback() def _get_traceback(self): tb", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Version 2.0 (the \"License\"); # you may not use this", "attributes `message`, `traceback` and `error`, where `message` contains type and", "isinstance(self.error, RobotError): return self.error.details return 'Traceback (most recent call last):\\n'", "'sun.reflect.', 'java.lang.reflect.') def _get_message(self): exc_name = self._get_name(self._exc_type) # OOME.getMessage and", "method handles also exceptions containing unicode messages. Thus it MUST", "MUST be used to get messages from all exceptions originating", "def _clean_up_message(self, message, name): return message class PythonErrorDetails(_ErrorDetails): def _get_message(self):", "for line in output.toString().splitlines() if not self._is_ignored_stack_trace_line(line)) msg = unic(self.error.getMessage()", "the last occurred exception.\"\"\" details = ErrorDetails(exclude_robot_traces=exclude_robot_traces) return details.message, details.traceback", "in RERAISED_EXCEPTIONS: raise exc_value details = PythonErrorDetails \\ if not", "import re import sys import traceback from robot.errors import RobotError", "It has attributes `message`, `traceback` and `error`, where `message` contains", "implied. 
# See the License for the specific language governing", "message(self): if self._message is None: self._message = self._get_message() return self._message", "exc_type): try: return exc_type.__name__ except AttributeError: return unic(exc_type) def _format_message(self,", "wraps the last occurred exception It has attributes `message`, `traceback`", "under the Apache License, Version 2.0 (the \"License\"); # you", "import sys import traceback from robot.errors import RobotError from .platform", "RobotError from .platform import JYTHON, RERAISED_EXCEPTIONS from .unic import unic", "else: Throwable = () def get_error_message(): \"\"\"Returns error message of", "name): return (name in self._generic_exception_names or isinstance(self.error, RobotError) or getattr(self.error,", "tb and self._is_excluded_traceback(tb): tb = tb.tb_next return ''.join(traceback.format_tb(tb)).rstrip() or '", "'java.lang.reflect.') def _get_message(self): exc_name = self._get_name(self._exc_type) # OOME.getMessage and even", "'', 1) return details def _is_ignored_stack_trace_line(self, line): if not line:", "by applicable law or agreed to in writing, software #", "'Exception', 'Error', 'RuntimeError', 'RuntimeException') def __init__(self, exc_type, exc_value, exc_traceback, exclude_robot_traces=True):", "if not self._is_out_of_memory_error(self._exc_type): exc_msg = self.error.getMessage() else: exc_msg = str(self.error)", "message of the last occurred exception. 
This method handles also", "message of the original error, `traceback` contains the traceback/stack trace", "_get_message(self): exc_name = self._get_name(self._exc_type) # OOME.getMessage and even toString seem", "or '') message = self._clean_up_message(message, name) name = name.split('.')[-1] #", "output.toString().splitlines() if not self._is_ignored_stack_trace_line(line)) msg = unic(self.error.getMessage() or '') if", "StringWriter, PrintWriter from java.lang import Throwable, OutOfMemoryError else: Throwable =", "traceback(self): if self._traceback is None: self._traceback = self._get_details() return self._traceback", "outside the framework. \"\"\" return ErrorDetails().message def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"Returns error", "to throw NullPointerException if not self._is_out_of_memory_error(self._exc_type): exc_msg = self.error.getMessage() else:", "= not os.getenv('ROBOT_INTERNAL_TRACES') if JYTHON: from java.io import StringWriter, PrintWriter", "messages. Thus it MUST be used to get messages from", "exc_type): return exc_type is OutOfMemoryError def _get_details(self): # OOME.printStackTrace seems", "def _is_generic_exception(self, name): return (name in self._generic_exception_names or isinstance(self.error, RobotError)", "__init__(self, exc_type, exc_value, exc_traceback, exclude_robot_traces=True): self.error = exc_value self._exc_type =", "also exceptions containing unicode messages. 
Thus it MUST be used", "_clean_up_message(self, message, name): return message class PythonErrorDetails(_ErrorDetails): def _get_message(self): name", "unic EXCLUDE_ROBOT_TRACES = not os.getenv('ROBOT_INTERNAL_TRACES') if JYTHON: from java.io import", "\"\"\"This factory returns an object that wraps the last occurred", "part of the name if not message: return name if", "name = self._get_name(self._exc_type) return self._format_message(name, unic(self.error)) def _get_details(self): if isinstance(self.error,", "while tb and self._is_excluded_traceback(tb): tb = tb.tb_next return ''.join(traceback.format_tb(tb)).rstrip() or", "used to get messages from all exceptions originating outside the", "unicode messages. Thus it MUST be used to get messages", "None @property def message(self): if self._message is None: self._message =", "details of the last occurred exception.\"\"\" details = ErrorDetails(exclude_robot_traces=exclude_robot_traces) return", "module = traceback.tb_frame.f_globals.get('__name__') return module and module.startswith('robot.') class JavaErrorDetails(_ErrorDetails): _java_trace_re", "exc_traceback self._exclude_robot_traces = exclude_robot_traces self._message = None self._traceback = None", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "to throw NullPointerException if self._is_out_of_memory_error(self._exc_type): return '' output = StringWriter()", "= ('AssertionError', 'AssertionFailedError', 'Exception', 'Error', 'RuntimeError', 'RuntimeException') def __init__(self, exc_type,", "Unless required by applicable law or agreed to in writing,", "exc_type in RERAISED_EXCEPTIONS: raise exc_value details = PythonErrorDetails \\ if", "JavaErrorDetails(_ErrorDetails): _java_trace_re = re.compile('^\\s+at (\\w.+)') _ignored_java_trace = ('org.python.', 'robot.running.', 'robot$py.',", "= self._get_message() return self._message def _get_message(self): raise NotImplementedError @property def", "exc_value, exc_traceback, 
exclude_robot_traces=True): self.error = exc_value self._exc_type = exc_type self._exc_traceback", "_is_generic_exception(self, name): return (name in self._generic_exception_names or isinstance(self.error, RobotError) or", "OOME.getMessage and even toString seem to throw NullPointerException if not", "self._get_traceback() def _get_traceback(self): tb = self._exc_traceback while tb and self._is_excluded_traceback(tb):", "the specific language governing permissions and # limitations under the", "raise NotImplementedError def _get_name(self, exc_type): try: return exc_type.__name__ except AttributeError:", "from robot.errors import RobotError from .platform import JYTHON, RERAISED_EXCEPTIONS from", "applicable law or agreed to in writing, software # distributed", "if isinstance(self.error, RobotError): return self.error.details return 'Traceback (most recent call", "contains type and message of the original error, `traceback` contains", "self._is_excluded_traceback(tb): tb = tb.tb_next return ''.join(traceback.format_tb(tb)).rstrip() or ' None' def", "= self._get_details() return self._traceback def _get_details(self): raise NotImplementedError def _get_name(self,", "_remove_stack_trace_lines(self, msg): lines = msg.splitlines() while lines: if self._java_trace_re.match(lines[-1]): lines.pop()", "Solutions and Networks # # Licensed under the Apache License,", "def ErrorDetails(exc_info=None, exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"This factory returns an object that wraps", "return self._traceback def _get_details(self): raise NotImplementedError def _get_name(self, exc_type): try:", "'robot$py.', 'sun.reflect.', 'java.lang.reflect.') def _get_message(self): exc_name = self._get_name(self._exc_type) # OOME.getMessage", "msg, name): tokens = msg.split(':', 1) if len(tokens) == 2", "of the last occurred exception. 
This method handles also exceptions", "exc_type is OutOfMemoryError def _get_details(self): # OOME.printStackTrace seems to throw", "`error` contains the original error instance. \"\"\" exc_type, exc_value, exc_traceback", "exception.\"\"\" details = ErrorDetails(exclude_robot_traces=exclude_robot_traces) return details.message, details.traceback def ErrorDetails(exc_info=None, exclude_robot_traces=EXCLUDE_ROBOT_TRACES):", "the framework. \"\"\" return ErrorDetails().message def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"Returns error message", "isinstance(self.error, RobotError) or getattr(self.error, 'ROBOT_SUPPRESS_NAME', False)) def _clean_up_message(self, message, name):", "in writing, software # distributed under the License is distributed", "`traceback` contains the traceback/stack trace and `error` contains the original", "2 and tokens[0] == name: msg = tokens[1] return msg", "name = name.split('.')[-1] # Use only last part of the", "from .platform import JYTHON, RERAISED_EXCEPTIONS from .unic import unic EXCLUDE_ROBOT_TRACES", "return self.error.details return 'Traceback (most recent call last):\\n' + self._get_traceback()", "_java_trace_re = re.compile('^\\s+at (\\w.+)') _ignored_java_trace = ('org.python.', 'robot.running.', 'robot$py.', 'sun.reflect.',", "= str(self.error) return self._format_message(exc_name, exc_msg) def _is_out_of_memory_error(self, exc_type): return exc_type", "= self._java_trace_re.match(line) if res is None: return False location =", "name if not message: return name if self._is_generic_exception(name): return message", "object that wraps the last occurred exception It has attributes", "exc_value details = PythonErrorDetails \\ if not isinstance(exc_value, Throwable) else", "= msg.splitlines() while lines: if self._java_trace_re.match(lines[-1]): lines.pop() else: break return", "tb = self._exc_traceback while tb and self._is_excluded_traceback(tb): tb = tb.tb_next", "return exc_type is OutOfMemoryError 
def _get_details(self): # OOME.printStackTrace seems to", "and details of the last occurred exception.\"\"\" details = ErrorDetails(exclude_robot_traces=exclude_robot_traces)", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "return '%s: %s' % (name, message) def _is_generic_exception(self, name): return", "License, Version 2.0 (the \"License\"); # you may not use", "_ErrorDetails(object): _generic_exception_names = ('AssertionError', 'AssertionFailedError', 'Exception', 'Error', 'RuntimeError', 'RuntimeException') def", "re import sys import traceback from robot.errors import RobotError from", "# You may obtain a copy of the License at", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "def message(self): if self._message is None: self._message = self._get_message() return", "exc_type, exc_value, exc_traceback, exclude_robot_traces=True): self.error = exc_value self._exc_type = exc_type", "last occurred exception. This method handles also exceptions containing unicode", "for entry in self._ignored_java_trace: if location.startswith(entry): return True return False", "`message` contains type and message of the original error, `traceback`", "return False module = traceback.tb_frame.f_globals.get('__name__') return module and module.startswith('robot.') class", "('org.python.', 'robot.running.', 'robot$py.', 'sun.reflect.', 'java.lang.reflect.') def _get_message(self): exc_name = self._get_name(self._exc_type)", "self._get_message() return self._message def _get_message(self): raise NotImplementedError @property def traceback(self):", "the License for the specific language governing permissions and #", "_get_traceback(self): tb = self._exc_traceback while tb and self._is_excluded_traceback(tb): tb =", "Apache License, Version 2.0 (the \"License\"); # you may not", "has attributes `message`, `traceback` and `error`, where `message` contains type", "self._message = None self._traceback = None @property def message(self): if", 
"return details.message, details.traceback def ErrorDetails(exc_info=None, exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"This factory returns an", "either express or implied. # See the License for the", "msg.split(':', 1) if len(tokens) == 2 and tokens[0] == name:", "last occurred exception It has attributes `message`, `traceback` and `error`,", "and message of the original error, `traceback` contains the traceback/stack", "' None' def _is_excluded_traceback(self, traceback): if not self._exclude_robot_traces: return False", "and even toString seem to throw NullPointerException if not self._is_out_of_memory_error(self._exc_type):", "= PythonErrorDetails \\ if not isinstance(exc_value, Throwable) else JavaErrorDetails return", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "return self._remove_exception_name(msg, name).strip() def _remove_stack_trace_lines(self, msg): lines = msg.splitlines() while", "not line: return True res = self._java_trace_re.match(line) if res is", "self._is_out_of_memory_error(self._exc_type): exc_msg = self.error.getMessage() else: exc_msg = str(self.error) return self._format_message(exc_name,", "if not isinstance(exc_value, Throwable) else JavaErrorDetails return details(exc_type, exc_value, exc_traceback,", "self._traceback = None @property def message(self): if self._message is None:", "location.startswith(entry): return True return False def _clean_up_message(self, msg, name): msg", "import unic EXCLUDE_ROBOT_TRACES = not os.getenv('ROBOT_INTERNAL_TRACES') if JYTHON: from java.io", "java.lang import Throwable, OutOfMemoryError else: Throwable = () def get_error_message():", "= ('org.python.', 'robot.running.', 'robot$py.', 'sun.reflect.', 'java.lang.reflect.') def _get_message(self): exc_name =", "toString seem to throw NullPointerException if not self._is_out_of_memory_error(self._exc_type): exc_msg =", "= self._get_name(self._exc_type) # OOME.getMessage and even toString seem to throw", 
"tokens = msg.split(':', 1) if len(tokens) == 2 and tokens[0]", "import Throwable, OutOfMemoryError else: Throwable = () def get_error_message(): \"\"\"Returns", "name).strip() def _remove_stack_trace_lines(self, msg): lines = msg.splitlines() while lines: if", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "def _get_details(self): if isinstance(self.error, RobotError): return self.error.details return 'Traceback (most", "messages from all exceptions originating outside the framework. \"\"\" return", "'robot.running.', 'robot$py.', 'sun.reflect.', 'java.lang.reflect.') def _get_message(self): exc_name = self._get_name(self._exc_type) #", "exceptions containing unicode messages. Thus it MUST be used to", "message) def _is_generic_exception(self, name): return (name in self._generic_exception_names or isinstance(self.error,", "not self._exclude_robot_traces: return False module = traceback.tb_frame.f_globals.get('__name__') return module and", "return False location = res.group(1) for entry in self._ignored_java_trace: if", "def get_error_message(): \"\"\"Returns error message of the last occurred exception.", "= traceback.tb_frame.f_globals.get('__name__') return module and module.startswith('robot.') class JavaErrorDetails(_ErrorDetails): _java_trace_re =", "Networks # # Licensed under the Apache License, Version 2.0", "exclude_robot_traces=True): self.error = exc_value self._exc_type = exc_type self._exc_traceback = exc_traceback", "True res = self._java_trace_re.match(line) if res is None: return False", "ErrorDetails(exc_info=None, exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"This factory returns an object that wraps the", "limitations under the License. 
import os import re import sys", "\"License\"); # you may not use this file except in", "\"\"\" exc_type, exc_value, exc_traceback = exc_info or sys.exc_info() if exc_type", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "exc_value, exc_traceback = exc_info or sys.exc_info() if exc_type in RERAISED_EXCEPTIONS:", "self._remove_stack_trace_lines(msg) return self._remove_exception_name(msg, name).strip() def _remove_stack_trace_lines(self, msg): lines = msg.splitlines()", "class PythonErrorDetails(_ErrorDetails): def _get_message(self): name = self._get_name(self._exc_type) return self._format_message(name, unic(self.error))", "return self._format_message(exc_name, exc_msg) def _is_out_of_memory_error(self, exc_type): return exc_type is OutOfMemoryError", "containing unicode messages. Thus it MUST be used to get", "exc_traceback, exclude_robot_traces=True): self.error = exc_value self._exc_type = exc_type self._exc_traceback =", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "trace and `error` contains the original error instance. \"\"\" exc_type,", "def _is_out_of_memory_error(self, exc_type): return exc_type is OutOfMemoryError def _get_details(self): #", "This method handles also exceptions containing unicode messages. Thus it", "PythonErrorDetails(_ErrorDetails): def _get_message(self): name = self._get_name(self._exc_type) return self._format_message(name, unic(self.error)) def", "it MUST be used to get messages from all exceptions", "if msg: details = details.replace(msg, '', 1) return details def", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "all exceptions originating outside the framework. 
\"\"\" return ErrorDetails().message def", "_get_name(self, exc_type): try: return exc_type.__name__ except AttributeError: return unic(exc_type) def", "try: return exc_type.__name__ except AttributeError: return unic(exc_type) def _format_message(self, name,", "message and details of the last occurred exception.\"\"\" details =", "the last occurred exception. This method handles also exceptions containing", "name if self._is_generic_exception(name): return message return '%s: %s' % (name,", "= self._exc_traceback while tb and self._is_excluded_traceback(tb): tb = tb.tb_next return", "You may obtain a copy of the License at #", "False def _clean_up_message(self, msg, name): msg = self._remove_stack_trace_lines(msg) return self._remove_exception_name(msg,", "sys import traceback from robot.errors import RobotError from .platform import", "msg.splitlines() while lines: if self._java_trace_re.match(lines[-1]): lines.pop() else: break return '\\n'.join(lines)", "'AssertionFailedError', 'Exception', 'Error', 'RuntimeError', 'RuntimeException') def __init__(self, exc_type, exc_value, exc_traceback,", "seem to throw NullPointerException if not self._is_out_of_memory_error(self._exc_type): exc_msg = self.error.getMessage()", "self._exc_traceback while tb and self._is_excluded_traceback(tb): tb = tb.tb_next return ''.join(traceback.format_tb(tb)).rstrip()", "type and message of the original error, `traceback` contains the", "details.message, details.traceback def ErrorDetails(exc_info=None, exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"This factory returns an object", "if self._is_generic_exception(name): return message return '%s: %s' % (name, message)", "the Apache License, Version 2.0 (the \"License\"); # you may", "and module.startswith('robot.') class JavaErrorDetails(_ErrorDetails): _java_trace_re = re.compile('^\\s+at (\\w.+)') _ignored_java_trace =", "len(tokens) == 2 and tokens[0] == name: msg = tokens[1]", "and # limitations under the License. 
import os import re", "traceback.tb_frame.f_globals.get('__name__') return module and module.startswith('robot.') class JavaErrorDetails(_ErrorDetails): _java_trace_re = re.compile('^\\s+at", "OOME.printStackTrace seems to throw NullPointerException if self._is_out_of_memory_error(self._exc_type): return '' output", "exclude_robot_traces=EXCLUDE_ROBOT_TRACES): \"\"\"This factory returns an object that wraps the last" ]
[ "StaticDedupe, Dedupe from dedupe.api import StaticRecordLink, RecordLink from dedupe.api import", "from dedupe.api import StaticGazetteer, Gazetteer from dedupe.core import randomPairs, randomPairsMatch,", "dedupe.api import StaticGazetteer, Gazetteer from dedupe.core import randomPairs, randomPairsMatch, frozendict", "import StaticGazetteer, Gazetteer from dedupe.core import randomPairs, randomPairsMatch, frozendict from", "dedupe.core import randomPairs, randomPairsMatch, frozendict from dedupe.convenience import consoleLabel, trainingDataDedupe,", "StaticRecordLink, RecordLink from dedupe.api import StaticGazetteer, Gazetteer from dedupe.core import", "RecordLink from dedupe.api import StaticGazetteer, Gazetteer from dedupe.core import randomPairs,", "randomPairs, randomPairsMatch, frozendict from dedupe.convenience import consoleLabel, trainingDataDedupe, trainingDataLink, canonicalize", "import StaticRecordLink, RecordLink from dedupe.api import StaticGazetteer, Gazetteer from dedupe.core", "import randomPairs, randomPairsMatch, frozendict from dedupe.convenience import consoleLabel, trainingDataDedupe, trainingDataLink,", "from dedupe.core import randomPairs, randomPairsMatch, frozendict from dedupe.convenience import consoleLabel,", "dedupe.api import StaticRecordLink, RecordLink from dedupe.api import StaticGazetteer, Gazetteer from", "Dedupe from dedupe.api import StaticRecordLink, RecordLink from dedupe.api import StaticGazetteer,", "dedupe.api import StaticDedupe, Dedupe from dedupe.api import StaticRecordLink, RecordLink from", "Gazetteer from dedupe.core import randomPairs, randomPairsMatch, frozendict from dedupe.convenience import", "from dedupe.api import StaticRecordLink, RecordLink from dedupe.api import StaticGazetteer, Gazetteer", "import StaticDedupe, Dedupe from dedupe.api import StaticRecordLink, RecordLink from dedupe.api", "from dedupe.api import StaticDedupe, Dedupe from dedupe.api import StaticRecordLink, RecordLink", "StaticGazetteer, 
Gazetteer from dedupe.core import randomPairs, randomPairsMatch, frozendict from dedupe.convenience" ]
[ "rate\", actual=0.0, expected=0.0, threshold=0.0) self.assertEqual(result, 0) def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self): \"\"\"Test fails", "exceeds positive threshold.\"\"\" result = verify(metric=\"Update latency\", actual=200, expected=100, threshold=0.1)", "\"\"\"Test fails when negative delta between actual rate and expected", "fails when delta between actual rate and expected rate exceeds", "TestVerify(TestCase): \"\"\"Tests misc.py verifies function.\"\"\" def test_verify__with_zero_threshold_and_expected_succeeds(self): \"\"\"Test passes when", "verify(metric=\"Update latency\", actual=50, expected=100, threshold=-0.01) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_and_positive_threshold(self): \"\"\"Test", "threshold are all zero.\"\"\" result = verify(metric=\"Query failure rate\", actual=0.0,", "def test_verify__fails_when_negative_delta_and_positive_threshold(self): \"\"\"Test fails when delta between actual rate and", "between actual rate and expected rate exceeds positive threshold.\"\"\" result", "1) def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self): \"\"\"Test fails when negative delta between actual", "latency\", actual=50, expected=100, threshold=-0.01) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_and_positive_threshold(self): \"\"\"Test fails", "and expected rate exceeds positive threshold.\"\"\" result = verify(metric=\"Update latency\",", "self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_and_positive_threshold(self): \"\"\"Test fails when delta between actual", "test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self): \"\"\"Test fails when positive delta between actual rate and", "def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self): \"\"\"Test fails when positive delta between actual rate", "result = verify(metric=\"Update latency\", actual=50, 
expected=100, threshold=0.01) self.assertEqual(result, 0) if", "actual=0.0, expected=0.0, threshold=0.0) self.assertEqual(result, 0) def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self): \"\"\"Test fails when", "= verify(metric=\"Update latency\", actual=200, expected=100, threshold=0.1) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self):", "test_verify__with_zero_threshold_and_expected_succeeds(self): \"\"\"Test passes when expected rate, actual rate and threshold", "rate exceeds negative threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50, expected=100,", "verify(metric=\"Update latency\", actual=50, expected=100, threshold=0.01) self.assertEqual(result, 0) if __name__ ==", "fails when negative delta between actual rate and expected rate", "threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50, expected=100, threshold=0.01) self.assertEqual(result, 0)", "between actual rate and expected rate exceeds threshold.\"\"\" result =", "rate and threshold are all zero.\"\"\" result = verify(metric=\"Query failure", "threshold=0.1) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self): \"\"\"Test fails when negative delta", "delta between actual rate and expected rate exceeds positive threshold.\"\"\"", "actual=50, expected=100, threshold=0.01) self.assertEqual(result, 0) if __name__ == \"__main__\": unittest.main()", "when negative delta between actual rate and expected rate exceeds", "expected=0.0, threshold=0.0) self.assertEqual(result, 0) def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self): \"\"\"Test fails when positive", "failure rate\", actual=0.0, expected=0.0, threshold=0.0) self.assertEqual(result, 0) def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self): \"\"\"Test", "delta between actual rate and expected rate exceeds 
threshold.\"\"\" result", "between actual rate and expected rate exceeds negative threshold.\"\"\" result", "when delta between actual rate and expected rate exceeds threshold.\"\"\"", "unittest from unittest import TestCase from misc import verify class", "self.assertEqual(result, 0) def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self): \"\"\"Test fails when positive delta between", "expected=100, threshold=-0.01) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_and_positive_threshold(self): \"\"\"Test fails when delta", "= verify(metric=\"Update latency\", actual=50, expected=100, threshold=-0.01) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_and_positive_threshold(self):", "latency\", actual=200, expected=100, threshold=0.1) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self): \"\"\"Test fails", "import unittest from unittest import TestCase from misc import verify", "actual rate and expected rate exceeds threshold.\"\"\" result = verify(metric=\"Update", "\"\"\"Tests misc.py verifies function.\"\"\" def test_verify__with_zero_threshold_and_expected_succeeds(self): \"\"\"Test passes when expected", "rate and expected rate exceeds negative threshold.\"\"\" result = verify(metric=\"Update", "misc import verify class TestVerify(TestCase): \"\"\"Tests misc.py verifies function.\"\"\" def", "actual=200, expected=100, threshold=0.1) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self): \"\"\"Test fails when", "when positive delta between actual rate and expected rate exceeds", "result = verify(metric=\"Update latency\", actual=200, expected=100, threshold=0.1) self.assertEqual(result, 1) def", "result = verify(metric=\"Update latency\", actual=50, expected=100, threshold=-0.01) self.assertEqual(result, 1) def", "actual=50, expected=100, threshold=-0.01) 
self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_and_positive_threshold(self): \"\"\"Test fails when", "latency\", actual=50, expected=100, threshold=0.01) self.assertEqual(result, 0) if __name__ == \"__main__\":", "unittest import TestCase from misc import verify class TestVerify(TestCase): \"\"\"Tests", "and expected rate exceeds threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50,", "verifies function.\"\"\" def test_verify__with_zero_threshold_and_expected_succeeds(self): \"\"\"Test passes when expected rate, actual", "rate, actual rate and threshold are all zero.\"\"\" result =", "\"\"\"Test fails when positive delta between actual rate and expected", "1) def test_verify__fails_when_negative_delta_and_positive_threshold(self): \"\"\"Test fails when delta between actual rate", "self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self): \"\"\"Test fails when negative delta between", "result = verify(metric=\"Query failure rate\", actual=0.0, expected=0.0, threshold=0.0) self.assertEqual(result, 0)", "are all zero.\"\"\" result = verify(metric=\"Query failure rate\", actual=0.0, expected=0.0,", "negative threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50, expected=100, threshold=-0.01) self.assertEqual(result,", "fails when positive delta between actual rate and expected rate", "passes when expected rate, actual rate and threshold are all", "from misc import verify class TestVerify(TestCase): \"\"\"Tests misc.py verifies function.\"\"\"", "verify class TestVerify(TestCase): \"\"\"Tests misc.py verifies function.\"\"\" def test_verify__with_zero_threshold_and_expected_succeeds(self): \"\"\"Test", "delta between actual rate and expected rate exceeds negative threshold.\"\"\"", "exceeds negative threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50, expected=100, threshold=-0.01)", "from unittest import TestCase from misc import verify 
class TestVerify(TestCase):", "threshold=0.0) self.assertEqual(result, 0) def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self): \"\"\"Test fails when positive delta", "test_verify__fails_when_negative_delta_and_positive_threshold(self): \"\"\"Test fails when delta between actual rate and expected", "expected rate exceeds negative threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50,", "rate exceeds positive threshold.\"\"\" result = verify(metric=\"Update latency\", actual=200, expected=100,", "and threshold are all zero.\"\"\" result = verify(metric=\"Query failure rate\",", "function.\"\"\" def test_verify__with_zero_threshold_and_expected_succeeds(self): \"\"\"Test passes when expected rate, actual rate", "when expected rate, actual rate and threshold are all zero.\"\"\"", "and expected rate exceeds negative threshold.\"\"\" result = verify(metric=\"Update latency\",", "rate and expected rate exceeds threshold.\"\"\" result = verify(metric=\"Update latency\",", "positive delta between actual rate and expected rate exceeds positive", "class TestVerify(TestCase): \"\"\"Tests misc.py verifies function.\"\"\" def test_verify__with_zero_threshold_and_expected_succeeds(self): \"\"\"Test passes", "actual rate and threshold are all zero.\"\"\" result = verify(metric=\"Query", "threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50, expected=100, threshold=-0.01) self.assertEqual(result, 1)", "= verify(metric=\"Update latency\", actual=50, expected=100, threshold=0.01) self.assertEqual(result, 0) if __name__", "\"\"\"Test fails when delta between actual rate and expected rate", "expected rate exceeds threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50, expected=100,", "positive threshold.\"\"\" result = verify(metric=\"Update latency\", actual=200, expected=100, threshold=0.1) self.assertEqual(result,", "def test_verify__with_zero_threshold_and_expected_succeeds(self): \"\"\"Test passes when 
expected rate, actual rate and", "= verify(metric=\"Query failure rate\", actual=0.0, expected=0.0, threshold=0.0) self.assertEqual(result, 0) def", "expected=100, threshold=0.1) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self): \"\"\"Test fails when negative", "actual rate and expected rate exceeds negative threshold.\"\"\" result =", "misc.py verifies function.\"\"\" def test_verify__with_zero_threshold_and_expected_succeeds(self): \"\"\"Test passes when expected rate,", "threshold=-0.01) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_and_positive_threshold(self): \"\"\"Test fails when delta between", "negative delta between actual rate and expected rate exceeds negative", "exceeds threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50, expected=100, threshold=0.01) self.assertEqual(result,", "0) def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self): \"\"\"Test fails when positive delta between actual", "test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self): \"\"\"Test fails when negative delta between actual rate and", "actual rate and expected rate exceeds positive threshold.\"\"\" result =", "all zero.\"\"\" result = verify(metric=\"Query failure rate\", actual=0.0, expected=0.0, threshold=0.0)", "zero.\"\"\" result = verify(metric=\"Query failure rate\", actual=0.0, expected=0.0, threshold=0.0) self.assertEqual(result,", "expected rate, actual rate and threshold are all zero.\"\"\" result", "verify(metric=\"Update latency\", actual=200, expected=100, threshold=0.1) self.assertEqual(result, 1) def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self): \"\"\"Test", "import TestCase from misc import verify class TestVerify(TestCase): \"\"\"Tests misc.py", "import verify class TestVerify(TestCase): \"\"\"Tests misc.py verifies function.\"\"\" def 
test_verify__with_zero_threshold_and_expected_succeeds(self):", "rate and expected rate exceeds positive threshold.\"\"\" result = verify(metric=\"Update", "def test_verify__fails_when_negative_delta_is_smaller_than_negative_threshold(self): \"\"\"Test fails when negative delta between actual rate", "TestCase from misc import verify class TestVerify(TestCase): \"\"\"Tests misc.py verifies", "rate exceeds threshold.\"\"\" result = verify(metric=\"Update latency\", actual=50, expected=100, threshold=0.01)", "expected rate exceeds positive threshold.\"\"\" result = verify(metric=\"Update latency\", actual=200,", "threshold.\"\"\" result = verify(metric=\"Update latency\", actual=200, expected=100, threshold=0.1) self.assertEqual(result, 1)", "verify(metric=\"Query failure rate\", actual=0.0, expected=0.0, threshold=0.0) self.assertEqual(result, 0) def test_verify__fails_when_positive_delta_is_larger_than_postive_threshold(self):", "\"\"\"Test passes when expected rate, actual rate and threshold are" ]
[ "one-dimensional (forward) FFT, of which `ifft` is the inverse ifft2", "in the same way as is returned by `fft`, i.e.,", "computed by `fft`. In other words, ``ifft(fft(a)) == a`` to", "s.imag.execute(), 'r--') ... >>> plt.legend(('real', 'imaginary')) ... >>> plt.show() \"\"\"", ">>> plt.legend(('real', 'imaginary')) ... >>> plt.show() \"\"\" a = astensor(a)", "over which to compute the inverse DFT. If not given,", "_op_type_ = OperandDef.IFFT def __init__(self, n=None, axis=-1, norm=None, dtype=None, **kw):", "2.0 (the \"License\"); # you may not use this file", "by `fft`. In other words, ``ifft(fft(a)) == a`` to within", "mode (see `numpy.fft`). Default is None. Returns ------- out :", "or the last one if `axis` is not specified. Raises", "Compute the one-dimensional inverse discrete Fourier Transform. This function computes", "of the values at the positive and negative Nyquist frequencies,", "inverse ifft2 : The two-dimensional inverse FFT. ifftn : The", "int, optional Length of the transformed axis of the output.", "is the common approach, it might lead to surprising results.", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "1:]`` should contain the negative-frequency terms, in increasing order starting", "def ifft(a, n=None, axis=-1, norm=None): \"\"\" Compute the one-dimensional inverse", "input parameter `n` is larger than the size of the", "used. norm : {None, \"ortho\"}, optional Normalization mode (see `numpy.fft`).", "plot a band-limited signal with random phases: >>> import matplotlib.pyplot", "opcodes as OperandDef from ..datasource import tensor as astensor from", ": array_like Input tensor, can be complex. n : int,", "An introduction, with definitions and general explanations. fft : The", "= mt.arange(400) >>> n = mt.zeros((400,), dtype=complex) >>> n[40:60] =", "with zeros. 
If `n` is not given, the length of", "= astensor(a) validate_fft(a, axis, norm) op = TensorIFFT(n=n, axis=axis, norm=norm,", "number of input points, ``A[n//2]`` represents the sum of the", "larger than the size of the input, the input is", "OperandDef.IFFT def __init__(self, n=None, axis=-1, norm=None, dtype=None, **kw): super().__init__(_n=n, _axis=axis,", "use this file except in compliance with the License. #", "#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 1999-2020", "and negative Nyquist frequencies, as the two are aliased together.", "different padding is desired, it must be performed before calling", ">>> n = mt.zeros((400,), dtype=complex) >>> n[40:60] = mt.exp(1j*mt.random.uniform(0, 2*mt.pi,", "In other words, ``ifft(fft(a)) == a`` to within numerical accuracy.", "input, the input is padded by appending zeros at the", "with random phases: >>> import matplotlib.pyplot as plt >>> t", "though this is the common approach, it might lead to", "Parameters ---------- a : array_like Input tensor, can be complex.", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "License. # You may obtain a copy of the License", "FFT. Notes ----- If the input parameter `n` is larger", "under the License is distributed on an \"AS IS\" BASIS,", "== a`` to within numerical accuracy. For a general description", "If a different padding is desired, it must be performed", "License for the specific language governing permissions and # limitations", ">>> s = mt.fft.ifft(n) >>> plt.plot(t.execute(), s.real.execute(), 'b-', t.execute(), s.imag.execute(),", "the two are aliased together. See `numpy.fft` for details. Parameters", "a different padding is desired, it must be performed before", "``ifft(fft(a)) == a`` to within numerical accuracy. For a general", "contain the negative-frequency terms, in increasing order starting from the", "to within numerical accuracy. For a general description of the", "Default is None. 
Returns ------- out : complex Tensor The", "validate_fft(a, axis, norm) op = TensorIFFT(n=n, axis=axis, norm=norm, dtype=np.dtype(np.complex_)) return", "general explanations. fft : The one-dimensional (forward) FFT, of which", ": The one-dimensional (forward) FFT, of which `ifft` is the", "'imaginary')) ... >>> plt.show() \"\"\" a = astensor(a) validate_fft(a, axis,", "IndexError If `axes` is larger than the last axis of", "in compliance with the License. # You may obtain a", "as OperandDef from ..datasource import tensor as astensor from .core", "software # distributed under the License is distributed on an", "given, the last axis is used. norm : {None, \"ortho\"},", "the input along the axis specified by `axis` is used.", "If the input parameter `n` is larger than the size", "Holding Ltd. # # Licensed under the Apache License, Version", "governing permissions and # limitations under the License. import numpy", "TensorStandardFFT class TensorIFFT(TensorStandardFFT, TensorComplexFFTMixin): _op_type_ = OperandDef.IFFT def __init__(self, n=None,", "band-limited signal with random phases: >>> import matplotlib.pyplot as plt", "{None, \"ortho\"}, optional Normalization mode (see `numpy.fft`). Default is None.", "is padded with zeros. If `n` is not given, the", "plt >>> t = mt.arange(400) >>> n = mt.zeros((400,), dtype=complex)", "order starting from the most negative frequency. For an even", ">>> n[40:60] = mt.exp(1j*mt.random.uniform(0, 2*mt.pi, (20,))) >>> s = mt.fft.ifft(n)", "words, ``ifft(fft(a)) == a`` to within numerical accuracy. For a", "of the algorithm and definitions, see `mt.fft`. The input should", "Alibaba Group Holding Ltd. # # Licensed under the Apache", "transformed axis of the output. 
If `n` is smaller than", "the axis indicated by `axis`, or the last one if", "0.+1.j, -1.+0.j, 0.-1.j]) Create and plot a band-limited signal with", "-1.+0.j, 0.-1.j]) Create and plot a band-limited signal with random", "as astensor from .core import TensorComplexFFTMixin, validate_fft, TensorStandardFFT class TensorIFFT(TensorStandardFFT,", ": complex Tensor The truncated or zero-padded input, transformed along", "t = mt.arange(400) >>> n = mt.zeros((400,), dtype=complex) >>> n[40:60]", "other words, ``ifft(fft(a)) == a`` to within numerical accuracy. For", "zero-padded input, transformed along the axis indicated by `axis`, or", "``a[1:n//2]`` should contain the positive-frequency terms, * ``a[n//2 + 1:]``", "the last axis is used. norm : {None, \"ortho\"}, optional", "axis, norm) op = TensorIFFT(n=n, axis=axis, norm=norm, dtype=np.dtype(np.complex_)) return op(a)", "should contain the positive-frequency terms, * ``a[n//2 + 1:]`` should", "s.real.execute(), 'b-', t.execute(), s.imag.execute(), 'r--') ... >>> plt.legend(('real', 'imaginary')) ...", "norm=None): \"\"\" Compute the one-dimensional inverse discrete Fourier Transform. This", "# -*- coding: utf-8 -*- # Copyright 1999-2020 Alibaba Group", "ordered in the same way as is returned by `fft`,", "# limitations under the License. import numpy as np from", "= mt.zeros((400,), dtype=complex) >>> n[40:60] = mt.exp(1j*mt.random.uniform(0, 2*mt.pi, (20,))) >>>", "* ``a[0]`` should contain the zero frequency term, * ``a[1:n//2]``", "`axis` is not specified. Raises ------ IndexError If `axes` is", "np from ... import opcodes as OperandDef from ..datasource import", "the zero frequency term, * ``a[1:n//2]`` should contain the positive-frequency", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "and # limitations under the License. import numpy as np", "ANY KIND, either express or implied. 
# See the License", "See the License for the specific language governing permissions and", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "If it is larger, the input is padded with zeros.", "zeros. If `n` is not given, the length of the", "to in writing, software # distributed under the License is", ": int, optional Length of the transformed axis of the", "* ``a[n//2 + 1:]`` should contain the negative-frequency terms, in", "and definitions, see `mt.fft`. The input should be ordered in", "# See the License for the specific language governing permissions", "a`` to within numerical accuracy. For a general description of", "or agreed to in writing, software # distributed under the", "See notes about padding issues. axis : int, optional Axis", "dtype=None, **kw): super().__init__(_n=n, _axis=axis, _norm=norm, _dtype=dtype, **kw) def ifft(a, n=None,", "required by applicable law or agreed to in writing, software", "The input should be ordered in the same way as", "the inverse DFT. If not given, the last axis is", "of `a`. See Also -------- mt.fft : An introduction, with", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "t.execute(), s.imag.execute(), 'r--') ... >>> plt.legend(('real', 'imaginary')) ... >>> plt.show()", "`numpy.fft`). Default is None. Returns ------- out : complex Tensor", "`n` is larger than the size of the input, the", "with the License. # You may obtain a copy of", "the sum of the values at the positive and negative", "padding issues. axis : int, optional Axis over which to", "this is the common approach, it might lead to surprising", "negative Nyquist frequencies, as the two are aliased together. See", "as plt >>> t = mt.arange(400) >>> n = mt.zeros((400,),", "or zero-padded input, transformed along the axis indicated by `axis`,", "the transformed axis of the output. 
If `n` is smaller", "`ifft` is the inverse ifft2 : The two-dimensional inverse FFT.", "Input tensor, can be complex. n : int, optional Length", "_axis=axis, _norm=norm, _dtype=dtype, **kw) def ifft(a, n=None, axis=-1, norm=None): \"\"\"", "a : array_like Input tensor, can be complex. n :", "input, transformed along the axis indicated by `axis`, or the", "`fft`, i.e., * ``a[0]`` should contain the zero frequency term,", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "as mt >>> mt.fft.ifft([0, 4, 0, 0]).execute() array([ 1.+0.j, 0.+1.j,", "import tensor as astensor from .core import TensorComplexFFTMixin, validate_fft, TensorStandardFFT", "_dtype=dtype, **kw) def ifft(a, n=None, axis=-1, norm=None): \"\"\" Compute the", "distributed under the License is distributed on an \"AS IS\"", "last axis is used. norm : {None, \"ortho\"}, optional Normalization", "ifft(a, n=None, axis=-1, norm=None): \"\"\" Compute the one-dimensional inverse discrete", "the length of the input, the input is cropped. If", "cropped. If it is larger, the input is padded with", "permissions and # limitations under the License. import numpy as", "mt.arange(400) >>> n = mt.zeros((400,), dtype=complex) >>> n[40:60] = mt.exp(1j*mt.random.uniform(0,", "express or implied. # See the License for the specific", "frequency. For an even number of input points, ``A[n//2]`` represents", "accuracy. For a general description of the algorithm and definitions,", "except in compliance with the License. # You may obtain", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", ".core import TensorComplexFFTMixin, validate_fft, TensorStandardFFT class TensorIFFT(TensorStandardFFT, TensorComplexFFTMixin): _op_type_ =", "description of the algorithm and definitions, see `mt.fft`. 
The input", "TensorComplexFFTMixin): _op_type_ = OperandDef.IFFT def __init__(self, n=None, axis=-1, norm=None, dtype=None,", "writing, software # distributed under the License is distributed on", "zeros at the end. Even though this is the common", "The n-dimensional inverse FFT. Notes ----- If the input parameter", "The truncated or zero-padded input, transformed along the axis indicated", "you may not use this file except in compliance with", "axis of the output. If `n` is smaller than the", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "length of the input, the input is cropped. If it", "the License. import numpy as np from ... import opcodes", "<reponame>tomzhang/mars-1 #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright", "terms, in increasing order starting from the most negative frequency.", "the one-dimensional inverse discrete Fourier Transform. This function computes the", "``A[n//2]`` represents the sum of the values at the positive", "-------- >>> import mars.tensor as mt >>> mt.fft.ifft([0, 4, 0,", "the most negative frequency. For an even number of input", "sum of the values at the positive and negative Nyquist", "aliased together. See `numpy.fft` for details. Parameters ---------- a :", "-------- mt.fft : An introduction, with definitions and general explanations.", "size of the input, the input is padded by appending", "`n` is smaller than the length of the input, the", "class TensorIFFT(TensorStandardFFT, TensorComplexFFTMixin): _op_type_ = OperandDef.IFFT def __init__(self, n=None, axis=-1,", "larger than the last axis of `a`. See Also --------", "= mt.fft.ifft(n) >>> plt.plot(t.execute(), s.real.execute(), 'b-', t.execute(), s.imag.execute(), 'r--') ...", "CONDITIONS OF ANY KIND, either express or implied. # See", "------ IndexError If `axes` is larger than the last axis", "axis of `a`. 
See Also -------- mt.fft : An introduction,", "For an even number of input points, ``A[n//2]`` represents the", "is not given, the length of the input along the", "import matplotlib.pyplot as plt >>> t = mt.arange(400) >>> n", "contain the positive-frequency terms, * ``a[n//2 + 1:]`` should contain", "is desired, it must be performed before calling `ifft`. Examples", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "``a[0]`` should contain the zero frequency term, * ``a[1:n//2]`` should", "utf-8 -*- # Copyright 1999-2020 Alibaba Group Holding Ltd. #", "output. If `n` is smaller than the length of the", "If `n` is not given, the length of the input", "_norm=norm, _dtype=dtype, **kw) def ifft(a, n=None, axis=-1, norm=None): \"\"\" Compute", "of which `ifft` is the inverse ifft2 : The two-dimensional", "super().__init__(_n=n, _axis=axis, _norm=norm, _dtype=dtype, **kw) def ifft(a, n=None, axis=-1, norm=None):", "the positive-frequency terms, * ``a[n//2 + 1:]`` should contain the", "the input is padded with zeros. If `n` is not", "(forward) FFT, of which `ifft` is the inverse ifft2 :", "import TensorComplexFFTMixin, validate_fft, TensorStandardFFT class TensorIFFT(TensorStandardFFT, TensorComplexFFTMixin): _op_type_ = OperandDef.IFFT", "0.-1.j]) Create and plot a band-limited signal with random phases:", "the values at the positive and negative Nyquist frequencies, as", "returned by `fft`, i.e., * ``a[0]`` should contain the zero", "Copyright 1999-2020 Alibaba Group Holding Ltd. # # Licensed under", "def __init__(self, n=None, axis=-1, norm=None, dtype=None, **kw): super().__init__(_n=n, _axis=axis, _norm=norm,", "OR CONDITIONS OF ANY KIND, either express or implied. #", "points, ``A[n//2]`` represents the sum of the values at the", "Nyquist frequencies, as the two are aliased together. See `numpy.fft`", "indicated by `axis`, or the last one if `axis` is", "and general explanations. 
fft : The one-dimensional (forward) FFT, of", "FFT, of which `ifft` is the inverse ifft2 : The", "astensor(a) validate_fft(a, axis, norm) op = TensorIFFT(n=n, axis=axis, norm=norm, dtype=np.dtype(np.complex_))", "transformed along the axis indicated by `axis`, or the last", "the License is distributed on an \"AS IS\" BASIS, #", "be ordered in the same way as is returned by", "last one if `axis` is not specified. Raises ------ IndexError", "is padded by appending zeros at the end. Even though", "along the axis specified by `axis` is used. See notes", "input is cropped. If it is larger, the input is", "notes about padding issues. axis : int, optional Axis over", "to compute the inverse DFT. If not given, the last", "is used. See notes about padding issues. axis : int,", ": int, optional Axis over which to compute the inverse", "by `axis`, or the last one if `axis` is not", "FFT. ifftn : The n-dimensional inverse FFT. Notes ----- If", "**kw): super().__init__(_n=n, _axis=axis, _norm=norm, _dtype=dtype, **kw) def ifft(a, n=None, axis=-1,", "lead to surprising results. If a different padding is desired,", "is larger than the size of the input, the input", "tensor, can be complex. n : int, optional Length of", "can be complex. n : int, optional Length of the", "one if `axis` is not specified. Raises ------ IndexError If", "optional Axis over which to compute the inverse DFT. If", "fft : The one-dimensional (forward) FFT, of which `ifft` is", "law or agreed to in writing, software # distributed under", "which `ifft` is the inverse ifft2 : The two-dimensional inverse", "as the two are aliased together. See `numpy.fft` for details.", "frequencies, as the two are aliased together. See `numpy.fft` for", "a = astensor(a) validate_fft(a, axis, norm) op = TensorIFFT(n=n, axis=axis,", "the input is padded by appending zeros at the end.", "negative frequency. 
For an even number of input points, ``A[n//2]``", "= OperandDef.IFFT def __init__(self, n=None, axis=-1, norm=None, dtype=None, **kw): super().__init__(_n=n,", "import opcodes as OperandDef from ..datasource import tensor as astensor", "mt.exp(1j*mt.random.uniform(0, 2*mt.pi, (20,))) >>> s = mt.fft.ifft(n) >>> plt.plot(t.execute(), s.real.execute(),", "If not given, the last axis is used. norm :", "input along the axis specified by `axis` is used. See", "the inverse ifft2 : The two-dimensional inverse FFT. ifftn :", "with definitions and general explanations. fft : The one-dimensional (forward)", "truncated or zero-padded input, transformed along the axis indicated by", "... >>> plt.show() \"\"\" a = astensor(a) validate_fft(a, axis, norm)", "not given, the length of the input along the axis", "zero frequency term, * ``a[1:n//2]`` should contain the positive-frequency terms,", "of the input, the input is cropped. If it is", "... >>> plt.legend(('real', 'imaginary')) ... >>> plt.show() \"\"\" a =", "may obtain a copy of the License at # #", "-*- coding: utf-8 -*- # Copyright 1999-2020 Alibaba Group Holding", "might lead to surprising results. If a different padding is", "Returns ------- out : complex Tensor The truncated or zero-padded", "the length of the input along the axis specified by", "the input parameter `n` is larger than the size of", "the negative-frequency terms, in increasing order starting from the most", "= mt.exp(1j*mt.random.uniform(0, 2*mt.pi, (20,))) >>> s = mt.fft.ifft(n) >>> plt.plot(t.execute(),", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "matplotlib.pyplot as plt >>> t = mt.arange(400) >>> n =", "which to compute the inverse DFT. If not given, the", "may not use this file except in compliance with the", "`axis` is used. See notes about padding issues. axis :", "last axis of `a`. See Also -------- mt.fft : An", "Create and plot a band-limited signal with random phases: >>>", "two are aliased together. 
See `numpy.fft` for details. Parameters ----------", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "contain the zero frequency term, * ``a[1:n//2]`` should contain the", "the axis specified by `axis` is used. See notes about", "this file except in compliance with the License. # You", "performed before calling `ifft`. Examples -------- >>> import mars.tensor as", "OperandDef from ..datasource import tensor as astensor from .core import", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "if `axis` is not specified. Raises ------ IndexError If `axes`", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "n=None, axis=-1, norm=None, dtype=None, **kw): super().__init__(_n=n, _axis=axis, _norm=norm, _dtype=dtype, **kw)", "inverse FFT. ifftn : The n-dimensional inverse FFT. Notes -----", "be complex. n : int, optional Length of the transformed", "input is padded by appending zeros at the end. Even", "plt.legend(('real', 'imaginary')) ... >>> plt.show() \"\"\" a = astensor(a) validate_fft(a,", "The one-dimensional (forward) FFT, of which `ifft` is the inverse", "to surprising results. If a different padding is desired, it", "TensorComplexFFTMixin, validate_fft, TensorStandardFFT class TensorIFFT(TensorStandardFFT, TensorComplexFFTMixin): _op_type_ = OperandDef.IFFT def", "transform computed by `fft`. In other words, ``ifft(fft(a)) == a``", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "desired, it must be performed before calling `ifft`. Examples --------", "before calling `ifft`. Examples -------- >>> import mars.tensor as mt", "If `axes` is larger than the last axis of `a`.", "optional Normalization mode (see `numpy.fft`). Default is None. Returns -------", "\"\"\" a = astensor(a) validate_fft(a, axis, norm) op = TensorIFFT(n=n,", "See `numpy.fft` for details. 
Parameters ---------- a : array_like Input", "computes the inverse of the one-dimensional *n*-point discrete Fourier transform", "Ltd. # # Licensed under the Apache License, Version 2.0", "inverse discrete Fourier Transform. This function computes the inverse of", "language governing permissions and # limitations under the License. import", "than the size of the input, the input is padded", "(see `numpy.fft`). Default is None. Returns ------- out : complex", "Raises ------ IndexError If `axes` is larger than the last", "of the input along the axis specified by `axis` is", "should contain the negative-frequency terms, in increasing order starting from", "1.+0.j, 0.+1.j, -1.+0.j, 0.-1.j]) Create and plot a band-limited signal", "the input, the input is padded by appending zeros at", "validate_fft, TensorStandardFFT class TensorIFFT(TensorStandardFFT, TensorComplexFFTMixin): _op_type_ = OperandDef.IFFT def __init__(self,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "than the last axis of `a`. See Also -------- mt.fft", "of the one-dimensional *n*-point discrete Fourier transform computed by `fft`.", "it is larger, the input is padded with zeros. If", "must be performed before calling `ifft`. Examples -------- >>> import", "the positive and negative Nyquist frequencies, as the two are", "or implied. # See the License for the specific language", "smaller than the length of the input, the input is", "mt.fft.ifft([0, 4, 0, 0]).execute() array([ 1.+0.j, 0.+1.j, -1.+0.j, 0.-1.j]) Create", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "it must be performed before calling `ifft`. Examples -------- >>>", "ifftn : The n-dimensional inverse FFT. 
Notes ----- If the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "should contain the zero frequency term, * ``a[1:n//2]`` should contain", "0, 0]).execute() array([ 1.+0.j, 0.+1.j, -1.+0.j, 0.-1.j]) Create and plot", "the inverse of the one-dimensional *n*-point discrete Fourier transform computed", "parameter `n` is larger than the size of the input,", "calling `ifft`. Examples -------- >>> import mars.tensor as mt >>>", "is cropped. If it is larger, the input is padded", "padding is desired, it must be performed before calling `ifft`.", "'b-', t.execute(), s.imag.execute(), 'r--') ... >>> plt.legend(('real', 'imaginary')) ... >>>", "Axis over which to compute the inverse DFT. If not", "'r--') ... >>> plt.legend(('real', 'imaginary')) ... >>> plt.show() \"\"\" a", "(the \"License\"); # you may not use this file except", "than the length of the input, the input is cropped.", "plt.show() \"\"\" a = astensor(a) validate_fft(a, axis, norm) op =", "# you may not use this file except in compliance", ">>> import mars.tensor as mt >>> mt.fft.ifft([0, 4, 0, 0]).execute()", "mars.tensor as mt >>> mt.fft.ifft([0, 4, 0, 0]).execute() array([ 1.+0.j,", "is returned by `fft`, i.e., * ``a[0]`` should contain the", "by `axis` is used. See notes about padding issues. axis", "`mt.fft`. The input should be ordered in the same way", "by `fft`, i.e., * ``a[0]`` should contain the zero frequency", "algorithm and definitions, see `mt.fft`. The input should be ordered", "from ..datasource import tensor as astensor from .core import TensorComplexFFTMixin,", "input, the input is cropped. If it is larger, the", "the algorithm and definitions, see `mt.fft`. The input should be", "the same way as is returned by `fft`, i.e., *", "explanations. fft : The one-dimensional (forward) FFT, of which `ifft`", "not specified. 
Raises ------ IndexError If `axes` is larger than", "# # Unless required by applicable law or agreed to", "larger, the input is padded with zeros. If `n` is", "given, the length of the input along the axis specified", "the common approach, it might lead to surprising results. If", "array_like Input tensor, can be complex. n : int, optional", "general description of the algorithm and definitions, see `mt.fft`. The", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "see `mt.fft`. The input should be ordered in the same", ": An introduction, with definitions and general explanations. fft :", "2*mt.pi, (20,))) >>> s = mt.fft.ifft(n) >>> plt.plot(t.execute(), s.real.execute(), 'b-',", "Version 2.0 (the \"License\"); # you may not use this", "the end. Even though this is the common approach, it", "mt.fft.ifft(n) >>> plt.plot(t.execute(), s.real.execute(), 'b-', t.execute(), s.imag.execute(), 'r--') ... >>>", "of the output. If `n` is smaller than the length", "* ``a[1:n//2]`` should contain the positive-frequency terms, * ``a[n//2 +", "one-dimensional *n*-point discrete Fourier transform computed by `fft`. In other", "0]).execute() array([ 1.+0.j, 0.+1.j, -1.+0.j, 0.-1.j]) Create and plot a", "axis is used. norm : {None, \"ortho\"}, optional Normalization mode", "a band-limited signal with random phases: >>> import matplotlib.pyplot as", "specified. Raises ------ IndexError If `axes` is larger than the", "way as is returned by `fft`, i.e., * ``a[0]`` should", "implied. # See the License for the specific language governing", ">>> plt.plot(t.execute(), s.real.execute(), 'b-', t.execute(), s.imag.execute(), 'r--') ... 
>>> plt.legend(('real',", "of input points, ``A[n//2]`` represents the sum of the values", "under the Apache License, Version 2.0 (the \"License\"); # you", "..datasource import tensor as astensor from .core import TensorComplexFFTMixin, validate_fft,", "+ 1:]`` should contain the negative-frequency terms, in increasing order", "together. See `numpy.fft` for details. Parameters ---------- a : array_like", "complex Tensor The truncated or zero-padded input, transformed along the", "definitions, see `mt.fft`. The input should be ordered in the", "by applicable law or agreed to in writing, software #", "as is returned by `fft`, i.e., * ``a[0]`` should contain", "mt.zeros((400,), dtype=complex) >>> n[40:60] = mt.exp(1j*mt.random.uniform(0, 2*mt.pi, (20,))) >>> s", "inverse of the one-dimensional *n*-point discrete Fourier transform computed by", "TensorIFFT(TensorStandardFFT, TensorComplexFFTMixin): _op_type_ = OperandDef.IFFT def __init__(self, n=None, axis=-1, norm=None,", "# Copyright 1999-2020 Alibaba Group Holding Ltd. # # Licensed", "details. Parameters ---------- a : array_like Input tensor, can be", "is not specified. Raises ------ IndexError If `axes` is larger", "python # -*- coding: utf-8 -*- # Copyright 1999-2020 Alibaba", "numerical accuracy. For a general description of the algorithm and", "in increasing order starting from the most negative frequency. For", "complex. n : int, optional Length of the transformed axis", "Notes ----- If the input parameter `n` is larger than", "Length of the transformed axis of the output. If `n`", "results. If a different padding is desired, it must be", "Group Holding Ltd. # # Licensed under the Apache License,", "frequency term, * ``a[1:n//2]`` should contain the positive-frequency terms, *", "License. import numpy as np from ... 
import opcodes as", "axis=-1, norm=None): \"\"\" Compute the one-dimensional inverse discrete Fourier Transform.", "n : int, optional Length of the transformed axis of", "axis indicated by `axis`, or the last one if `axis`", "the output. If `n` is smaller than the length of", "issues. axis : int, optional Axis over which to compute", "the last one if `axis` is not specified. Raises ------", "from ... import opcodes as OperandDef from ..datasource import tensor", "`fft`. In other words, ``ifft(fft(a)) == a`` to within numerical", "padded with zeros. If `n` is not given, the length", "mt.fft : An introduction, with definitions and general explanations. fft", ">>> plt.show() \"\"\" a = astensor(a) validate_fft(a, axis, norm) op", "Tensor The truncated or zero-padded input, transformed along the axis", "introduction, with definitions and general explanations. fft : The one-dimensional", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "`ifft`. Examples -------- >>> import mars.tensor as mt >>> mt.fft.ifft([0,", "Unless required by applicable law or agreed to in writing,", "the input, the input is cropped. If it is larger,", "For a general description of the algorithm and definitions, see", "Fourier Transform. This function computes the inverse of the one-dimensional", "the specific language governing permissions and # limitations under the", "negative-frequency terms, in increasing order starting from the most negative", "about padding issues. axis : int, optional Axis over which", "``a[n//2 + 1:]`` should contain the negative-frequency terms, in increasing", "applicable law or agreed to in writing, software # distributed", "... 
import opcodes as OperandDef from ..datasource import tensor as", "astensor from .core import TensorComplexFFTMixin, validate_fft, TensorStandardFFT class TensorIFFT(TensorStandardFFT, TensorComplexFFTMixin):", "If `n` is smaller than the length of the input,", "n[40:60] = mt.exp(1j*mt.random.uniform(0, 2*mt.pi, (20,))) >>> s = mt.fft.ifft(n) >>>", "coding: utf-8 -*- # Copyright 1999-2020 Alibaba Group Holding Ltd.", "of the transformed axis of the output. If `n` is", "The two-dimensional inverse FFT. ifftn : The n-dimensional inverse FFT.", "----- If the input parameter `n` is larger than the", "in writing, software # distributed under the License is distributed", "Fourier transform computed by `fft`. In other words, ``ifft(fft(a)) ==", "by appending zeros at the end. Even though this is", "for details. Parameters ---------- a : array_like Input tensor, can", ">>> mt.fft.ifft([0, 4, 0, 0]).execute() array([ 1.+0.j, 0.+1.j, -1.+0.j, 0.-1.j])", "padded by appending zeros at the end. Even though this", "import mars.tensor as mt >>> mt.fft.ifft([0, 4, 0, 0]).execute() array([", "positive-frequency terms, * ``a[n//2 + 1:]`` should contain the negative-frequency", "tensor as astensor from .core import TensorComplexFFTMixin, validate_fft, TensorStandardFFT class", "Also -------- mt.fft : An introduction, with definitions and general", "term, * ``a[1:n//2]`` should contain the positive-frequency terms, * ``a[n//2", "phases: >>> import matplotlib.pyplot as plt >>> t = mt.arange(400)", "inverse DFT. If not given, the last axis is used.", "axis=-1, norm=None, dtype=None, **kw): super().__init__(_n=n, _axis=axis, _norm=norm, _dtype=dtype, **kw) def", "None. Returns ------- out : complex Tensor The truncated or", "the size of the input, the input is padded by", "n-dimensional inverse FFT. 
Notes ----- If the input parameter `n`", "Examples -------- >>> import mars.tensor as mt >>> mt.fft.ifft([0, 4,", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "axis : int, optional Axis over which to compute the", "surprising results. If a different padding is desired, it must", "`numpy.fft` for details. Parameters ---------- a : array_like Input tensor,", "License, Version 2.0 (the \"License\"); # you may not use", "`n` is not given, the length of the input along", "# You may obtain a copy of the License at", "approach, it might lead to surprising results. If a different", "array([ 1.+0.j, 0.+1.j, -1.+0.j, 0.-1.j]) Create and plot a band-limited", "dtype=complex) >>> n[40:60] = mt.exp(1j*mt.random.uniform(0, 2*mt.pi, (20,))) >>> s =", "**kw) def ifft(a, n=None, axis=-1, norm=None): \"\"\" Compute the one-dimensional", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "input should be ordered in the same way as is", ": {None, \"ortho\"}, optional Normalization mode (see `numpy.fft`). Default is", "`axis`, or the last one if `axis` is not specified.", "and plot a band-limited signal with random phases: >>> import", "is the inverse ifft2 : The two-dimensional inverse FFT. ifftn", "optional Length of the transformed axis of the output. If", "end. Even though this is the common approach, it might", "random phases: >>> import matplotlib.pyplot as plt >>> t =", "along the axis indicated by `axis`, or the last one", "the one-dimensional *n*-point discrete Fourier transform computed by `fft`. In", "within numerical accuracy. For a general description of the algorithm", "n = mt.zeros((400,), dtype=complex) >>> n[40:60] = mt.exp(1j*mt.random.uniform(0, 2*mt.pi, (20,)))", "inverse FFT. Notes ----- If the input parameter `n` is", ">>> t = mt.arange(400) >>> n = mt.zeros((400,), dtype=complex) >>>", "two-dimensional inverse FFT. ifftn : The n-dimensional inverse FFT. Notes", "under the License. import numpy as np from ... 
import", "the License for the specific language governing permissions and #", "is smaller than the length of the input, the input", "`axes` is larger than the last axis of `a`. See", "Apache License, Version 2.0 (the \"License\"); # you may not", "norm=None, dtype=None, **kw): super().__init__(_n=n, _axis=axis, _norm=norm, _dtype=dtype, **kw) def ifft(a,", "either express or implied. # See the License for the", "Even though this is the common approach, it might lead", ">>> import matplotlib.pyplot as plt >>> t = mt.arange(400) >>>", "See Also -------- mt.fft : An introduction, with definitions and", "values at the positive and negative Nyquist frequencies, as the", "it might lead to surprising results. If a different padding", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "from the most negative frequency. For an even number of", "the last axis of `a`. See Also -------- mt.fft :", "specified by `axis` is used. See notes about padding issues.", "an even number of input points, ``A[n//2]`` represents the sum", "limitations under the License. import numpy as np from ...", "norm : {None, \"ortho\"}, optional Normalization mode (see `numpy.fft`). Default", "as np from ... import opcodes as OperandDef from ..datasource", "of the input, the input is padded by appending zeros", "signal with random phases: >>> import matplotlib.pyplot as plt >>>", ": The two-dimensional inverse FFT. ifftn : The n-dimensional inverse", "used. See notes about padding issues. axis : int, optional", "discrete Fourier transform computed by `fft`. In other words, ``ifft(fft(a))", "function computes the inverse of the one-dimensional *n*-point discrete Fourier", "should be ordered in the same way as is returned", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "out : complex Tensor The truncated or zero-padded input, transformed", "`a`. 
See Also -------- mt.fft : An introduction, with definitions", "4, 0, 0]).execute() array([ 1.+0.j, 0.+1.j, -1.+0.j, 0.-1.j]) Create and", "a general description of the algorithm and definitions, see `mt.fft`.", "import numpy as np from ... import opcodes as OperandDef", "is None. Returns ------- out : complex Tensor The truncated", ": The n-dimensional inverse FFT. Notes ----- If the input", "i.e., * ``a[0]`` should contain the zero frequency term, *", "__init__(self, n=None, axis=-1, norm=None, dtype=None, **kw): super().__init__(_n=n, _axis=axis, _norm=norm, _dtype=dtype,", "definitions and general explanations. fft : The one-dimensional (forward) FFT,", "input points, ``A[n//2]`` represents the sum of the values at", "is larger than the last axis of `a`. See Also", "Transform. This function computes the inverse of the one-dimensional *n*-point", "not given, the last axis is used. norm : {None,", "one-dimensional inverse discrete Fourier Transform. This function computes the inverse", "at the end. Even though this is the common approach,", "the input is cropped. If it is larger, the input", "1999-2020 Alibaba Group Holding Ltd. # # Licensed under the", "n=None, axis=-1, norm=None): \"\"\" Compute the one-dimensional inverse discrete Fourier", "\"\"\" Compute the one-dimensional inverse discrete Fourier Transform. This function", "------- out : complex Tensor The truncated or zero-padded input,", "starting from the most negative frequency. For an even number", "This function computes the inverse of the one-dimensional *n*-point discrete", "from .core import TensorComplexFFTMixin, validate_fft, TensorStandardFFT class TensorIFFT(TensorStandardFFT, TensorComplexFFTMixin): _op_type_", "\"License\"); # you may not use this file except in", "appending zeros at the end. Even though this is the", "even number of input points, ``A[n//2]`` represents the sum of", "is larger, the input is padded with zeros. If `n`", "numpy as np from ... 
import opcodes as OperandDef from", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "mt >>> mt.fft.ifft([0, 4, 0, 0]).execute() array([ 1.+0.j, 0.+1.j, -1.+0.j,", "plt.plot(t.execute(), s.real.execute(), 'b-', t.execute(), s.imag.execute(), 'r--') ... >>> plt.legend(('real', 'imaginary'))", "\"ortho\"}, optional Normalization mode (see `numpy.fft`). Default is None. Returns", "# distributed under the License is distributed on an \"AS", "positive and negative Nyquist frequencies, as the two are aliased", "*n*-point discrete Fourier transform computed by `fft`. In other words,", "# Unless required by applicable law or agreed to in", "s = mt.fft.ifft(n) >>> plt.plot(t.execute(), s.real.execute(), 'b-', t.execute(), s.imag.execute(), 'r--')", "same way as is returned by `fft`, i.e., * ``a[0]``", "represents the sum of the values at the positive and", "at the positive and negative Nyquist frequencies, as the two", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "length of the input along the axis specified by `axis`", "DFT. If not given, the last axis is used. norm", "discrete Fourier Transform. This function computes the inverse of the", "be performed before calling `ifft`. Examples -------- >>> import mars.tensor", "You may obtain a copy of the License at #", "int, optional Axis over which to compute the inverse DFT.", "---------- a : array_like Input tensor, can be complex. n", "axis specified by `axis` is used. See notes about padding", "(20,))) >>> s = mt.fft.ifft(n) >>> plt.plot(t.execute(), s.real.execute(), 'b-', t.execute(),", "compute the inverse DFT. If not given, the last axis", "most negative frequency. For an even number of input points,", "the Apache License, Version 2.0 (the \"License\"); # you may", "Normalization mode (see `numpy.fft`). Default is None. Returns ------- out", "-*- # Copyright 1999-2020 Alibaba Group Holding Ltd. 
# #", "terms, * ``a[n//2 + 1:]`` should contain the negative-frequency terms,", "input is padded with zeros. If `n` is not given,", "increasing order starting from the most negative frequency. For an", "ifft2 : The two-dimensional inverse FFT. ifftn : The n-dimensional", "common approach, it might lead to surprising results. If a", "is used. norm : {None, \"ortho\"}, optional Normalization mode (see", "are aliased together. See `numpy.fft` for details. Parameters ---------- a" ]
[]
[ "= float(time) radius = radius/100 area = pow(radius,2)*math.pi averageEMF =", "= input(\"Input the second magnetic Field value (T): \") time", "float(magField1) time = float(time) radius = radius/100 area = pow(radius,2)*math.pi", "radius = float(radius) resistance = float(resistance) magField0 = float(magField0) magField1", "the second magnetic Field value (T): \") time = input(\"Input", "\") magField0 = input(\"Input the first magnetic Field value (T):", "input(\"Input the first magnetic Field value (T): \") magField1 =", "\") turns = float(turns) radius = float(radius) resistance = float(resistance)", "float(magField0) magField1 = float(magField1) time = float(time) radius = radius/100", "import math extraNumber = 4 * math.pi * pow(10,-7) def", "extraNumber = 4 * math.pi * pow(10,-7) def avgEMF(): turns", "float(turns) radius = float(radius) resistance = float(resistance) magField0 = float(magField0)", "as np import math extraNumber = 4 * math.pi *", "(T): \") magField1 = input(\"Input the second magnetic Field value", "input(\"Input the second magnetic Field value (T): \") time =", "time (s): \") turns = float(turns) radius = float(radius) resistance", "Field value (T): \") magField1 = input(\"Input the second magnetic", "(T): \") time = input(\"Input the time (s): \") turns", "input(\"Input resistance (Ω): \") magField0 = input(\"Input the first magnetic", "math.pi * pow(10,-7) def avgEMF(): turns = input(\"Input how many", "float(resistance) magField0 = float(magField0) magField1 = float(magField1) time = float(time)", "4 * math.pi * pow(10,-7) def avgEMF(): turns = input(\"Input", "magField0 = input(\"Input the first magnetic Field value (T): \")", "= float(turns) radius = float(radius) resistance = float(resistance) magField0 =", "magnetic Field value (T): \") magField1 = input(\"Input the second", "resistance = input(\"Input resistance (Ω): \") magField0 = input(\"Input the", "<reponame>illusion173/Physics250 import numpy as np import math extraNumber = 
4", "(Ω): \") magField0 = input(\"Input the first magnetic Field value", "\") time = input(\"Input the time (s): \") turns =", "= pow(radius,2)*math.pi averageEMF = turns * area * ((magField1-magField0)/time) print(averageEMF)", "pow(10,-7) def avgEMF(): turns = input(\"Input how many turns: \")", "Field value (T): \") time = input(\"Input the time (s):", "float(time) radius = radius/100 area = pow(radius,2)*math.pi averageEMF = turns", "area = pow(radius,2)*math.pi averageEMF = turns * area * ((magField1-magField0)/time)", "the radius (cm):\") resistance = input(\"Input resistance (Ω): \") magField0", "magField1 = input(\"Input the second magnetic Field value (T): \")", "magField1 = float(magField1) time = float(time) radius = radius/100 area", "time = float(time) radius = radius/100 area = pow(radius,2)*math.pi averageEMF", "(cm):\") resistance = input(\"Input resistance (Ω): \") magField0 = input(\"Input", "resistance = float(resistance) magField0 = float(magField0) magField1 = float(magField1) time", "float(radius) resistance = float(resistance) magField0 = float(magField0) magField1 = float(magField1)", "first magnetic Field value (T): \") magField1 = input(\"Input the", "* math.pi * pow(10,-7) def avgEMF(): turns = input(\"Input how", "second magnetic Field value (T): \") time = input(\"Input the", "turns: \") radius = input(\"Input the radius (cm):\") resistance =", "value (T): \") time = input(\"Input the time (s): \")", "turns = float(turns) radius = float(radius) resistance = float(resistance) magField0", "= radius/100 area = pow(radius,2)*math.pi averageEMF = turns * area", "radius/100 area = pow(radius,2)*math.pi averageEMF = turns * area *", "input(\"Input the radius (cm):\") resistance = input(\"Input resistance (Ω): \")", "= float(radius) resistance = float(resistance) magField0 = float(magField0) magField1 =", "= float(magField0) magField1 = float(magField1) time = float(time) radius =", "magField0 = float(magField0) magField1 = float(magField1) 
time = float(time) radius", "radius (cm):\") resistance = input(\"Input resistance (Ω): \") magField0 =", "turns = input(\"Input how many turns: \") radius = input(\"Input", "input(\"Input the time (s): \") turns = float(turns) radius =", "= 4 * math.pi * pow(10,-7) def avgEMF(): turns =", "= input(\"Input resistance (Ω): \") magField0 = input(\"Input the first", "= float(magField1) time = float(time) radius = radius/100 area =", "* pow(10,-7) def avgEMF(): turns = input(\"Input how many turns:", "def avgEMF(): turns = input(\"Input how many turns: \") radius", "numpy as np import math extraNumber = 4 * math.pi", "= input(\"Input how many turns: \") radius = input(\"Input the", "avgEMF(): turns = input(\"Input how many turns: \") radius =", "how many turns: \") radius = input(\"Input the radius (cm):\")", "np import math extraNumber = 4 * math.pi * pow(10,-7)", "import numpy as np import math extraNumber = 4 *", "\") radius = input(\"Input the radius (cm):\") resistance = input(\"Input", "= input(\"Input the radius (cm):\") resistance = input(\"Input resistance (Ω):", "= float(resistance) magField0 = float(magField0) magField1 = float(magField1) time =", "resistance (Ω): \") magField0 = input(\"Input the first magnetic Field", "radius = input(\"Input the radius (cm):\") resistance = input(\"Input resistance", "math extraNumber = 4 * math.pi * pow(10,-7) def avgEMF():", "input(\"Input how many turns: \") radius = input(\"Input the radius", "time = input(\"Input the time (s): \") turns = float(turns)", "\") magField1 = input(\"Input the second magnetic Field value (T):", "many turns: \") radius = input(\"Input the radius (cm):\") resistance", "the first magnetic Field value (T): \") magField1 = input(\"Input", "= input(\"Input the time (s): \") turns = float(turns) radius", "pow(radius,2)*math.pi averageEMF = turns * area * ((magField1-magField0)/time) print(averageEMF) avgEMF()", "magnetic Field value (T): \") time = input(\"Input the time", "(s): \") turns = 
float(turns) radius = float(radius) resistance =", "= input(\"Input the first magnetic Field value (T): \") magField1", "the time (s): \") turns = float(turns) radius = float(radius)", "radius = radius/100 area = pow(radius,2)*math.pi averageEMF = turns *", "value (T): \") magField1 = input(\"Input the second magnetic Field" ]
[ "fps=70): \"\"\" Computes a magnitude spectrogram at a given sample", "OUTPUT_ON_GPU = True from testfile import make_test_signal def spectrogram(signal, sample_rate=22050,", "the following here because cupy cannot do stride tricks #", "do stride tricks # the actual copying work is included", "if not INPUT_ON_GPU: signal = cp.array(signal.astype(np.float32)) # already blown up", "OUTPUT_ON_GPU: cp.cuda.get_current_stream().synchronize() else: return spect.get() def main(): # load input", "// 70 frame_len = 1024 frames = len(x) - frame_len", "Computes the spectrogram of a test signal using cupy and", "Hz), frame length (in samples) and frame rate (in Hz),", "+ 1 x = np.lib.stride_tricks.as_strided( x, (frames, frame_len), (x.strides[0], x.strides[0]))[::hop_size]", "sample_rate=22050, frame_len=1024, fps=70): \"\"\" Computes a magnitude spectrogram at a", "perform FFT spect = cp.fft.rfft(signal) # convert into magnitude spectrogram", "spectrogram of a test signal using cupy and cuFFT. Author:", "x, spectrogram x = make_test_signal() # we do the following", "the benchmark unless INPUT_ON_GPU hop_size = 22050 // 70 frame_len", "(frames, frame_len), (x.strides[0], x.strides[0]))[::hop_size] if INPUT_ON_GPU: x = cp.array(x.astype(np.float32)) #", "x = cp.array(x.astype(np.float32)) # benchmark times = timeit.repeat( setup='from __main__", "= timeit.repeat( setup='from __main__ import x, spectrogram', stmt='spectrogram(x)', repeat=5, number=32)", "# the actual copying work is included in the benchmark", "tricks # the actual copying work is included in the", "x = make_test_signal() # we do the following here because", "and cuFFT. 
Author: <NAME> \"\"\" import sys import os import", "import x, spectrogram', stmt='spectrogram(x)', repeat=5, number=32) print(\"Took %.3fs.\" % (min(times)", "of frames win = cp.hanning(frame_len).astype(cp.float32) # apply window function #signal", "= True OUTPUT_ON_GPU = True from testfile import make_test_signal def", "list of frames win = cp.hanning(frame_len).astype(cp.float32) # apply window function", "rate (in Hz), frame length (in samples) and frame rate", "INPUT_ON_GPU: x = cp.array(x.astype(np.float32)) # benchmark times = timeit.repeat( setup='from", "(in Hz), on CUDA using cupy. \"\"\" if not INPUT_ON_GPU:", "this doesn't work correctly for some reason. signal = signal", "- frame_len + 1 x = np.lib.stride_tricks.as_strided( x, (frames, frame_len),", "a list of frames win = cp.hanning(frame_len).astype(cp.float32) # apply window", "1 x = np.lib.stride_tricks.as_strided( x, (frames, frame_len), (x.strides[0], x.strides[0]))[::hop_size] if", "# apply window function #signal *= win # this doesn't", "frame length (in samples) and frame rate (in Hz), on", "spectrogram', stmt='spectrogram(x)', repeat=5, number=32) print(\"Took %.3fs.\" % (min(times) / 32))", "at a given sample rate (in Hz), frame length (in", "# -*- coding: utf-8 -*- \"\"\" Computes the spectrogram of", "reason. signal = signal * win # perform FFT spect", "result #assert not OUTPUT_ON_GPU #np.save(sys.argv[0][:-2] + 'npy', spectrogram(x)) if __name__==\"__main__\":", "cuFFT. Author: <NAME> \"\"\" import sys import os import timeit", "make_test_signal def spectrogram(signal, sample_rate=22050, frame_len=1024, fps=70): \"\"\" Computes a magnitude", "using cupy and cuFFT. Author: <NAME> \"\"\" import sys import", "sys import os import timeit import numpy as np import", "signal = signal * win # perform FFT spect =", "= 22050 // 70 frame_len = 1024 frames = len(x)", "doesn't work correctly for some reason. 
signal = signal *", "# we do the following here because cupy cannot do", "signal = cp.array(signal.astype(np.float32)) # already blown up to a list", "Computes a magnitude spectrogram at a given sample rate (in", "(x.strides[0], x.strides[0]))[::hop_size] if INPUT_ON_GPU: x = cp.array(x.astype(np.float32)) # benchmark times", "of a test signal using cupy and cuFFT. Author: <NAME>", "correctly for some reason. signal = signal * win #", "import timeit import numpy as np import cupy as cp", "cp.cuda.get_current_stream().synchronize() else: return spect.get() def main(): # load input global", "1024 frames = len(x) - frame_len + 1 x =", "# perform FFT spect = cp.fft.rfft(signal) # convert into magnitude", "load input global x, spectrogram x = make_test_signal() # we", "a test signal using cupy and cuFFT. Author: <NAME> \"\"\"", "__main__ import x, spectrogram', stmt='spectrogram(x)', repeat=5, number=32) print(\"Took %.3fs.\" %", "else: return spect.get() def main(): # load input global x,", "number=32) print(\"Took %.3fs.\" % (min(times) / 32)) # save result", "do the following here because cupy cannot do stride tricks", "= cp.hanning(frame_len).astype(cp.float32) # apply window function #signal *= win #", "/ 32)) # save result #assert not OUTPUT_ON_GPU #np.save(sys.argv[0][:-2] +", "on CUDA using cupy. 
\"\"\" if not INPUT_ON_GPU: signal =", "copying work is included in the benchmark unless INPUT_ON_GPU hop_size", "frame_len = 1024 frames = len(x) - frame_len + 1", "<reponame>zhouxzh/Jetson_nano_stft_benchmark #!/usr/bin/env python # -*- coding: utf-8 -*- \"\"\" Computes", "frame_len + 1 x = np.lib.stride_tricks.as_strided( x, (frames, frame_len), (x.strides[0],", "numpy as np import cupy as cp INPUT_ON_GPU = True", "given sample rate (in Hz), frame length (in samples) and", "actual copying work is included in the benchmark unless INPUT_ON_GPU", "a given sample rate (in Hz), frame length (in samples)", "from testfile import make_test_signal def spectrogram(signal, sample_rate=22050, frame_len=1024, fps=70): \"\"\"", "x, spectrogram', stmt='spectrogram(x)', repeat=5, number=32) print(\"Took %.3fs.\" % (min(times) /", "win # this doesn't work correctly for some reason. signal", "CUDA using cupy. \"\"\" if not INPUT_ON_GPU: signal = cp.array(signal.astype(np.float32))", "python # -*- coding: utf-8 -*- \"\"\" Computes the spectrogram", "%.3fs.\" % (min(times) / 32)) # save result #assert not", "(min(times) / 32)) # save result #assert not OUTPUT_ON_GPU #np.save(sys.argv[0][:-2]", "spectrogram x = make_test_signal() # we do the following here", "\"\"\" if not INPUT_ON_GPU: signal = cp.array(signal.astype(np.float32)) # already blown", "= cp.array(signal.astype(np.float32)) # already blown up to a list of", "because cupy cannot do stride tricks # the actual copying", "timeit.repeat( setup='from __main__ import x, spectrogram', stmt='spectrogram(x)', repeat=5, number=32) print(\"Took", "spect = cp.abs(spect) # return if OUTPUT_ON_GPU: cp.cuda.get_current_stream().synchronize() else: return", "input global x, spectrogram x = make_test_signal() # we do", "Hz), on CUDA using cupy. 
\"\"\" if not INPUT_ON_GPU: signal", "to a list of frames win = cp.hanning(frame_len).astype(cp.float32) # apply", "a magnitude spectrogram at a given sample rate (in Hz),", "-*- coding: utf-8 -*- \"\"\" Computes the spectrogram of a", "\"\"\" import sys import os import timeit import numpy as", "here because cupy cannot do stride tricks # the actual", "not INPUT_ON_GPU: signal = cp.array(signal.astype(np.float32)) # already blown up to", "apply window function #signal *= win # this doesn't work", "# this doesn't work correctly for some reason. signal =", "x.strides[0]))[::hop_size] if INPUT_ON_GPU: x = cp.array(x.astype(np.float32)) # benchmark times =", "cp.array(x.astype(np.float32)) # benchmark times = timeit.repeat( setup='from __main__ import x,", "repeat=5, number=32) print(\"Took %.3fs.\" % (min(times) / 32)) # save", "coding: utf-8 -*- \"\"\" Computes the spectrogram of a test", "*= win # this doesn't work correctly for some reason.", "= np.lib.stride_tricks.as_strided( x, (frames, frame_len), (x.strides[0], x.strides[0]))[::hop_size] if INPUT_ON_GPU: x", "unless INPUT_ON_GPU hop_size = 22050 // 70 frame_len = 1024", "#!/usr/bin/env python # -*- coding: utf-8 -*- \"\"\" Computes the", "= make_test_signal() # we do the following here because cupy", "frames win = cp.hanning(frame_len).astype(cp.float32) # apply window function #signal *=", "os import timeit import numpy as np import cupy as", "following here because cupy cannot do stride tricks # the", "print(\"Took %.3fs.\" % (min(times) / 32)) # save result #assert", "= len(x) - frame_len + 1 x = np.lib.stride_tricks.as_strided( x,", "length (in samples) and frame rate (in Hz), on CUDA", "blown up to a list of frames win = cp.hanning(frame_len).astype(cp.float32)", "and frame rate (in Hz), on CUDA using cupy. 
\"\"\"", "work is included in the benchmark unless INPUT_ON_GPU hop_size =", "the actual copying work is included in the benchmark unless", "frame_len=1024, fps=70): \"\"\" Computes a magnitude spectrogram at a given", "signal using cupy and cuFFT. Author: <NAME> \"\"\" import sys", "# already blown up to a list of frames win", "* win # perform FFT spect = cp.fft.rfft(signal) # convert", "import numpy as np import cupy as cp INPUT_ON_GPU =", "window function #signal *= win # this doesn't work correctly", "sample rate (in Hz), frame length (in samples) and frame", "make_test_signal() # we do the following here because cupy cannot", "% (min(times) / 32)) # save result #assert not OUTPUT_ON_GPU", "\"\"\" Computes the spectrogram of a test signal using cupy", "cp.array(signal.astype(np.float32)) # already blown up to a list of frames", "for some reason. signal = signal * win # perform", "some reason. signal = signal * win # perform FFT", "# save result #assert not OUTPUT_ON_GPU #np.save(sys.argv[0][:-2] + 'npy', spectrogram(x))", "spectrogram(signal, sample_rate=22050, frame_len=1024, fps=70): \"\"\" Computes a magnitude spectrogram at", "= True from testfile import make_test_signal def spectrogram(signal, sample_rate=22050, frame_len=1024,", "import sys import os import timeit import numpy as np", "-*- \"\"\" Computes the spectrogram of a test signal using", "(in Hz), frame length (in samples) and frame rate (in", "main(): # load input global x, spectrogram x = make_test_signal()", "32)) # save result #assert not OUTPUT_ON_GPU #np.save(sys.argv[0][:-2] + 'npy',", "into magnitude spectrogram spect = cp.abs(spect) # return if OUTPUT_ON_GPU:", "frame_len), (x.strides[0], x.strides[0]))[::hop_size] if INPUT_ON_GPU: x = cp.array(x.astype(np.float32)) # benchmark", "\"\"\" Computes a magnitude spectrogram at a given sample rate", "cp.hanning(frame_len).astype(cp.float32) # apply window function #signal *= win # this", "spectrogram at a given sample rate (in Hz), frame 
length", "included in the benchmark unless INPUT_ON_GPU hop_size = 22050 //", "= signal * win # perform FFT spect = cp.fft.rfft(signal)", "= cp.abs(spect) # return if OUTPUT_ON_GPU: cp.cuda.get_current_stream().synchronize() else: return spect.get()", "cp INPUT_ON_GPU = True OUTPUT_ON_GPU = True from testfile import", "rate (in Hz), on CUDA using cupy. \"\"\" if not", "True OUTPUT_ON_GPU = True from testfile import make_test_signal def spectrogram(signal,", "setup='from __main__ import x, spectrogram', stmt='spectrogram(x)', repeat=5, number=32) print(\"Took %.3fs.\"", "frame rate (in Hz), on CUDA using cupy. \"\"\" if", "cupy and cuFFT. Author: <NAME> \"\"\" import sys import os", "True from testfile import make_test_signal def spectrogram(signal, sample_rate=22050, frame_len=1024, fps=70):", "win = cp.hanning(frame_len).astype(cp.float32) # apply window function #signal *= win", "times = timeit.repeat( setup='from __main__ import x, spectrogram', stmt='spectrogram(x)', repeat=5,", "stride tricks # the actual copying work is included in", "np.lib.stride_tricks.as_strided( x, (frames, frame_len), (x.strides[0], x.strides[0]))[::hop_size] if INPUT_ON_GPU: x =", "signal * win # perform FFT spect = cp.fft.rfft(signal) #", "spect = cp.fft.rfft(signal) # convert into magnitude spectrogram spect =", "= 1024 frames = len(x) - frame_len + 1 x", "function #signal *= win # this doesn't work correctly for", "work correctly for some reason. 
signal = signal * win", "= cp.fft.rfft(signal) # convert into magnitude spectrogram spect = cp.abs(spect)", "benchmark unless INPUT_ON_GPU hop_size = 22050 // 70 frame_len =", "spectrogram spect = cp.abs(spect) # return if OUTPUT_ON_GPU: cp.cuda.get_current_stream().synchronize() else:", "as np import cupy as cp INPUT_ON_GPU = True OUTPUT_ON_GPU", "len(x) - frame_len + 1 x = np.lib.stride_tricks.as_strided( x, (frames,", "# convert into magnitude spectrogram spect = cp.abs(spect) # return", "already blown up to a list of frames win =", "in the benchmark unless INPUT_ON_GPU hop_size = 22050 // 70", "INPUT_ON_GPU = True OUTPUT_ON_GPU = True from testfile import make_test_signal", "global x, spectrogram x = make_test_signal() # we do the", "x = np.lib.stride_tricks.as_strided( x, (frames, frame_len), (x.strides[0], x.strides[0]))[::hop_size] if INPUT_ON_GPU:", "hop_size = 22050 // 70 frame_len = 1024 frames =", "Author: <NAME> \"\"\" import sys import os import timeit import", "we do the following here because cupy cannot do stride", "if INPUT_ON_GPU: x = cp.array(x.astype(np.float32)) # benchmark times = timeit.repeat(", "up to a list of frames win = cp.hanning(frame_len).astype(cp.float32) #", "cupy. \"\"\" if not INPUT_ON_GPU: signal = cp.array(signal.astype(np.float32)) # already", "spect.get() def main(): # load input global x, spectrogram x", "def spectrogram(signal, sample_rate=22050, frame_len=1024, fps=70): \"\"\" Computes a magnitude spectrogram", "70 frame_len = 1024 frames = len(x) - frame_len +", "save result #assert not OUTPUT_ON_GPU #np.save(sys.argv[0][:-2] + 'npy', spectrogram(x)) if", "INPUT_ON_GPU: signal = cp.array(signal.astype(np.float32)) # already blown up to a", "using cupy. 
\"\"\" if not INPUT_ON_GPU: signal = cp.array(signal.astype(np.float32)) #", "import os import timeit import numpy as np import cupy", "def main(): # load input global x, spectrogram x =", "magnitude spectrogram spect = cp.abs(spect) # return if OUTPUT_ON_GPU: cp.cuda.get_current_stream().synchronize()", "as cp INPUT_ON_GPU = True OUTPUT_ON_GPU = True from testfile", "#assert not OUTPUT_ON_GPU #np.save(sys.argv[0][:-2] + 'npy', spectrogram(x)) if __name__==\"__main__\": main()", "convert into magnitude spectrogram spect = cp.abs(spect) # return if", "test signal using cupy and cuFFT. Author: <NAME> \"\"\" import", "# return if OUTPUT_ON_GPU: cp.cuda.get_current_stream().synchronize() else: return spect.get() def main():", "cp.fft.rfft(signal) # convert into magnitude spectrogram spect = cp.abs(spect) #", "is included in the benchmark unless INPUT_ON_GPU hop_size = 22050", "samples) and frame rate (in Hz), on CUDA using cupy.", "timeit import numpy as np import cupy as cp INPUT_ON_GPU", "testfile import make_test_signal def spectrogram(signal, sample_rate=22050, frame_len=1024, fps=70): \"\"\" Computes", "utf-8 -*- \"\"\" Computes the spectrogram of a test signal", "the spectrogram of a test signal using cupy and cuFFT.", "return if OUTPUT_ON_GPU: cp.cuda.get_current_stream().synchronize() else: return spect.get() def main(): #", "win # perform FFT spect = cp.fft.rfft(signal) # convert into", "# load input global x, spectrogram x = make_test_signal() #", "benchmark times = timeit.repeat( setup='from __main__ import x, spectrogram', stmt='spectrogram(x)',", "<NAME> \"\"\" import sys import os import timeit import numpy", "np import cupy as cp INPUT_ON_GPU = True OUTPUT_ON_GPU =", "= cp.array(x.astype(np.float32)) # benchmark times = timeit.repeat( setup='from __main__ import", "x, (frames, frame_len), (x.strides[0], x.strides[0]))[::hop_size] if INPUT_ON_GPU: x = cp.array(x.astype(np.float32))", "22050 // 70 frame_len = 1024 frames = len(x) -", "import 
make_test_signal def spectrogram(signal, sample_rate=22050, frame_len=1024, fps=70): \"\"\" Computes a", "cp.abs(spect) # return if OUTPUT_ON_GPU: cp.cuda.get_current_stream().synchronize() else: return spect.get() def", "if OUTPUT_ON_GPU: cp.cuda.get_current_stream().synchronize() else: return spect.get() def main(): # load", "#signal *= win # this doesn't work correctly for some", "INPUT_ON_GPU hop_size = 22050 // 70 frame_len = 1024 frames", "cannot do stride tricks # the actual copying work is", "FFT spect = cp.fft.rfft(signal) # convert into magnitude spectrogram spect", "magnitude spectrogram at a given sample rate (in Hz), frame", "cupy cannot do stride tricks # the actual copying work", "frames = len(x) - frame_len + 1 x = np.lib.stride_tricks.as_strided(", "cupy as cp INPUT_ON_GPU = True OUTPUT_ON_GPU = True from", "stmt='spectrogram(x)', repeat=5, number=32) print(\"Took %.3fs.\" % (min(times) / 32)) #", "(in samples) and frame rate (in Hz), on CUDA using", "return spect.get() def main(): # load input global x, spectrogram", "# benchmark times = timeit.repeat( setup='from __main__ import x, spectrogram',", "import cupy as cp INPUT_ON_GPU = True OUTPUT_ON_GPU = True" ]
[ "check_srid_has_meter_unit, pm_callback class GeotrekConfig(AppConfig): \"\"\" Base class to handle table", "GeotrekConfig): \"\"\" bind for django.contrib.contenttype \"\"\" pass class SessionsGeotrekConfig(SessionsConfig, GeotrekConfig):", "move on right schemas, and load SQL files !! WARNING", "CeleryGeotrekConfig(GeotrekConfig, CeleryResultConfig): pass class EasyThumbnailsGeotrekConfig(GeotrekConfig): name = 'easy_thumbnails' verbose_name =", "from django.contrib.sessions.apps import SessionsConfig from django.db.models.signals import post_migrate from django_celery_results.apps", "load SQL files !! WARNING !! need to create subclass", "right schemas, and load SQL files !! WARNING !! need", "django.contrib.contenttypes.apps import ContentTypesConfig from django.contrib.sessions.apps import SessionsConfig from django.db.models.signals import", "from django.db.models.signals import post_migrate from django_celery_results.apps import CeleryResultConfig from geotrek.common.utils.signals", "AuthGeotrekConfig(AuthConfig, GeotrekConfig): \"\"\" bind for django.contrib.auth \"\"\" pass class ContenttypeGeotrekConfig(ContentTypesConfig,", "SQL files !! WARNING !! need to create subclass in", "subclasses here for external subclasses \"\"\" def ready(self): post_migrate.connect(pm_callback, sender=self,", "from django.contrib.contenttypes.apps import ContentTypesConfig from django.contrib.sessions.apps import SessionsConfig from django.db.models.signals", "class AdminGeotrekConfig(AdminConfig, GeotrekConfig): pass class CeleryGeotrekConfig(GeotrekConfig, CeleryResultConfig): pass class EasyThumbnailsGeotrekConfig(GeotrekConfig):", "geotrek.common.utils.signals import check_srid_has_meter_unit, pm_callback class GeotrekConfig(AppConfig): \"\"\" Base class to", "GeotrekConfig): pass class AdminGeotrekConfig(AdminConfig, GeotrekConfig): pass class CeleryGeotrekConfig(GeotrekConfig, CeleryResultConfig): pass", "on right schemas, and load SQL files !! 
WARNING !!", "from geotrek.common.utils.signals import check_srid_has_meter_unit, pm_callback class GeotrekConfig(AppConfig): \"\"\" Base class", "post_migrate.connect(pm_callback, sender=self, dispatch_uid='geotrek.core.pm_callback') check_srid_has_meter_unit() class AuthGeotrekConfig(AuthConfig, GeotrekConfig): \"\"\" bind for", "django.contrib.auth \"\"\" pass class ContenttypeGeotrekConfig(ContentTypesConfig, GeotrekConfig): \"\"\" bind for django.contrib.contenttype", "class CeleryGeotrekConfig(GeotrekConfig, CeleryResultConfig): pass class EasyThumbnailsGeotrekConfig(GeotrekConfig): name = 'easy_thumbnails' verbose_name", "def ready(self): post_migrate.connect(pm_callback, sender=self, dispatch_uid='geotrek.core.pm_callback') check_srid_has_meter_unit() class AuthGeotrekConfig(AuthConfig, GeotrekConfig): \"\"\"", "subclasses \"\"\" def ready(self): post_migrate.connect(pm_callback, sender=self, dispatch_uid='geotrek.core.pm_callback') check_srid_has_meter_unit() class AuthGeotrekConfig(AuthConfig,", "check_srid_has_meter_unit() class AuthGeotrekConfig(AuthConfig, GeotrekConfig): \"\"\" bind for django.contrib.auth \"\"\" pass", "Base class to handle table move on right schemas, and", "django.apps import AppConfig from django.contrib.admin.apps import AdminConfig from django.contrib.auth.apps import", "class GeotrekConfig(AppConfig): \"\"\" Base class to handle table move on", "and load SQL files !! WARNING !! 
need to create", "pm_callback class GeotrekConfig(AppConfig): \"\"\" Base class to handle table move", "in geotrek.myapp.apps for project apps, and create subclasses here for", "\"\"\" bind for django.contrib.auth \"\"\" pass class ContenttypeGeotrekConfig(ContentTypesConfig, GeotrekConfig): \"\"\"", "\"\"\" pass class SessionsGeotrekConfig(SessionsConfig, GeotrekConfig): pass class AdminGeotrekConfig(AdminConfig, GeotrekConfig): pass", "external subclasses \"\"\" def ready(self): post_migrate.connect(pm_callback, sender=self, dispatch_uid='geotrek.core.pm_callback') check_srid_has_meter_unit() class", "files !! WARNING !! need to create subclass in geotrek.myapp.apps", "to handle table move on right schemas, and load SQL", "!! WARNING !! need to create subclass in geotrek.myapp.apps for", "geotrek.myapp.apps for project apps, and create subclasses here for external", "SessionsGeotrekConfig(SessionsConfig, GeotrekConfig): pass class AdminGeotrekConfig(AdminConfig, GeotrekConfig): pass class CeleryGeotrekConfig(GeotrekConfig, CeleryResultConfig):", "subclass in geotrek.myapp.apps for project apps, and create subclasses here", "ContenttypeGeotrekConfig(ContentTypesConfig, GeotrekConfig): \"\"\" bind for django.contrib.contenttype \"\"\" pass class SessionsGeotrekConfig(SessionsConfig,", "pass class EasyThumbnailsGeotrekConfig(GeotrekConfig): name = 'easy_thumbnails' verbose_name = 'Easy thumbnails'", "CeleryResultConfig): pass class EasyThumbnailsGeotrekConfig(GeotrekConfig): name = 'easy_thumbnails' verbose_name = 'Easy", "pass class ContenttypeGeotrekConfig(ContentTypesConfig, GeotrekConfig): \"\"\" bind for django.contrib.contenttype \"\"\" pass", "AppConfig from django.contrib.admin.apps import AdminConfig from django.contrib.auth.apps import AuthConfig from", "to create subclass in geotrek.myapp.apps for project apps, and create", "django.contrib.auth.apps import AuthConfig from django.contrib.contenttypes.apps import ContentTypesConfig from 
django.contrib.sessions.apps import", "project apps, and create subclasses here for external subclasses \"\"\"", "\"\"\" Base class to handle table move on right schemas,", "django.db.models.signals import post_migrate from django_celery_results.apps import CeleryResultConfig from geotrek.common.utils.signals import", "from django_celery_results.apps import CeleryResultConfig from geotrek.common.utils.signals import check_srid_has_meter_unit, pm_callback class", "AdminConfig from django.contrib.auth.apps import AuthConfig from django.contrib.contenttypes.apps import ContentTypesConfig from", "class to handle table move on right schemas, and load", "post_migrate from django_celery_results.apps import CeleryResultConfig from geotrek.common.utils.signals import check_srid_has_meter_unit, pm_callback", "ContentTypesConfig from django.contrib.sessions.apps import SessionsConfig from django.db.models.signals import post_migrate from", "import CeleryResultConfig from geotrek.common.utils.signals import check_srid_has_meter_unit, pm_callback class GeotrekConfig(AppConfig): \"\"\"", "import check_srid_has_meter_unit, pm_callback class GeotrekConfig(AppConfig): \"\"\" Base class to handle", "django.contrib.contenttype \"\"\" pass class SessionsGeotrekConfig(SessionsConfig, GeotrekConfig): pass class AdminGeotrekConfig(AdminConfig, GeotrekConfig):", "class AuthGeotrekConfig(AuthConfig, GeotrekConfig): \"\"\" bind for django.contrib.auth \"\"\" pass class", "import AppConfig from django.contrib.admin.apps import AdminConfig from django.contrib.auth.apps import AuthConfig", "GeotrekConfig(AppConfig): \"\"\" Base class to handle table move on right", "django.contrib.sessions.apps import SessionsConfig from django.db.models.signals import post_migrate from django_celery_results.apps import", "for external subclasses \"\"\" def ready(self): post_migrate.connect(pm_callback, sender=self, dispatch_uid='geotrek.core.pm_callback') check_srid_has_meter_unit()", "handle table move on 
right schemas, and load SQL files", "for django.contrib.auth \"\"\" pass class ContenttypeGeotrekConfig(ContentTypesConfig, GeotrekConfig): \"\"\" bind for", "import AuthConfig from django.contrib.contenttypes.apps import ContentTypesConfig from django.contrib.sessions.apps import SessionsConfig", "schemas, and load SQL files !! WARNING !! need to", "table move on right schemas, and load SQL files !!", "apps, and create subclasses here for external subclasses \"\"\" def", "SessionsConfig from django.db.models.signals import post_migrate from django_celery_results.apps import CeleryResultConfig from", "\"\"\" def ready(self): post_migrate.connect(pm_callback, sender=self, dispatch_uid='geotrek.core.pm_callback') check_srid_has_meter_unit() class AuthGeotrekConfig(AuthConfig, GeotrekConfig):", "pass class CeleryGeotrekConfig(GeotrekConfig, CeleryResultConfig): pass class EasyThumbnailsGeotrekConfig(GeotrekConfig): name = 'easy_thumbnails'", "sender=self, dispatch_uid='geotrek.core.pm_callback') check_srid_has_meter_unit() class AuthGeotrekConfig(AuthConfig, GeotrekConfig): \"\"\" bind for django.contrib.auth", "for project apps, and create subclasses here for external subclasses", "and create subclasses here for external subclasses \"\"\" def ready(self):", "import SessionsConfig from django.db.models.signals import post_migrate from django_celery_results.apps import CeleryResultConfig", "pass class AdminGeotrekConfig(AdminConfig, GeotrekConfig): pass class CeleryGeotrekConfig(GeotrekConfig, CeleryResultConfig): pass class", "from django.contrib.admin.apps import AdminConfig from django.contrib.auth.apps import AuthConfig from django.contrib.contenttypes.apps", "class ContenttypeGeotrekConfig(ContentTypesConfig, GeotrekConfig): \"\"\" bind for django.contrib.contenttype \"\"\" pass class", "ready(self): post_migrate.connect(pm_callback, sender=self, dispatch_uid='geotrek.core.pm_callback') check_srid_has_meter_unit() class AuthGeotrekConfig(AuthConfig, 
GeotrekConfig): \"\"\" bind", "bind for django.contrib.auth \"\"\" pass class ContenttypeGeotrekConfig(ContentTypesConfig, GeotrekConfig): \"\"\" bind", "import AdminConfig from django.contrib.auth.apps import AuthConfig from django.contrib.contenttypes.apps import ContentTypesConfig", "dispatch_uid='geotrek.core.pm_callback') check_srid_has_meter_unit() class AuthGeotrekConfig(AuthConfig, GeotrekConfig): \"\"\" bind for django.contrib.auth \"\"\"", "import post_migrate from django_celery_results.apps import CeleryResultConfig from geotrek.common.utils.signals import check_srid_has_meter_unit,", "django.contrib.admin.apps import AdminConfig from django.contrib.auth.apps import AuthConfig from django.contrib.contenttypes.apps import", "here for external subclasses \"\"\" def ready(self): post_migrate.connect(pm_callback, sender=self, dispatch_uid='geotrek.core.pm_callback')", "import ContentTypesConfig from django.contrib.sessions.apps import SessionsConfig from django.db.models.signals import post_migrate", "pass class SessionsGeotrekConfig(SessionsConfig, GeotrekConfig): pass class AdminGeotrekConfig(AdminConfig, GeotrekConfig): pass class", "GeotrekConfig): pass class CeleryGeotrekConfig(GeotrekConfig, CeleryResultConfig): pass class EasyThumbnailsGeotrekConfig(GeotrekConfig): name =", "!! 
need to create subclass in geotrek.myapp.apps for project apps,", "from django.contrib.auth.apps import AuthConfig from django.contrib.contenttypes.apps import ContentTypesConfig from django.contrib.sessions.apps", "CeleryResultConfig from geotrek.common.utils.signals import check_srid_has_meter_unit, pm_callback class GeotrekConfig(AppConfig): \"\"\" Base", "need to create subclass in geotrek.myapp.apps for project apps, and", "create subclasses here for external subclasses \"\"\" def ready(self): post_migrate.connect(pm_callback,", "GeotrekConfig): \"\"\" bind for django.contrib.auth \"\"\" pass class ContenttypeGeotrekConfig(ContentTypesConfig, GeotrekConfig):", "from django.apps import AppConfig from django.contrib.admin.apps import AdminConfig from django.contrib.auth.apps", "\"\"\" pass class ContenttypeGeotrekConfig(ContentTypesConfig, GeotrekConfig): \"\"\" bind for django.contrib.contenttype \"\"\"", "for django.contrib.contenttype \"\"\" pass class SessionsGeotrekConfig(SessionsConfig, GeotrekConfig): pass class AdminGeotrekConfig(AdminConfig,", "AuthConfig from django.contrib.contenttypes.apps import ContentTypesConfig from django.contrib.sessions.apps import SessionsConfig from", "create subclass in geotrek.myapp.apps for project apps, and create subclasses", "django_celery_results.apps import CeleryResultConfig from geotrek.common.utils.signals import check_srid_has_meter_unit, pm_callback class GeotrekConfig(AppConfig):", "WARNING !! 
need to create subclass in geotrek.myapp.apps for project", "class SessionsGeotrekConfig(SessionsConfig, GeotrekConfig): pass class AdminGeotrekConfig(AdminConfig, GeotrekConfig): pass class CeleryGeotrekConfig(GeotrekConfig,", "\"\"\" bind for django.contrib.contenttype \"\"\" pass class SessionsGeotrekConfig(SessionsConfig, GeotrekConfig): pass", "bind for django.contrib.contenttype \"\"\" pass class SessionsGeotrekConfig(SessionsConfig, GeotrekConfig): pass class", "AdminGeotrekConfig(AdminConfig, GeotrekConfig): pass class CeleryGeotrekConfig(GeotrekConfig, CeleryResultConfig): pass class EasyThumbnailsGeotrekConfig(GeotrekConfig): name" ]
[ "n_reps = specific_params.get('n_reps', config['n_reps']) if use_profiling: torch.toggle_profile(use_profiling) progress = tqdm(range(dry_run", "as prof: run_model(criterion, *model, *inp, optimizer=optimizer) end.record() start_sync = time.time()", "# Annotate where the final run starts in the log", "use_dtr: # operators-only time, tracked by DTR cuda_time = torch.compute_time()", "\"a\") as fout: val_dict = { 'network': model_name_replace_dict.get(model_name, model_name), 'algorithm':", "pytorch's cuda elapsed time is already in ms 'gpu_time': float(data['gpu_time']),", "avoid any issues of memory hanging around between inputs, we", "gen_input(i, specific_params.get('extra_params', dict())) n_reps = specific_params.get('n_reps', config['n_reps']) if use_profiling: torch.toggle_profile(use_profiling)", "validate_config import validate_trials_config from pt_trial_util import create_csv_writer from tqdm import", "save_log = use_dtr and specific_params.get('save_logs', config['save_logs']) and i == config['n_inputs']", "profiling info use_profiling = use_dtr and specific_params.get('use_profiling', False) use_cudnn =", "assert 'batch_size' in specific_params if use_dtr: assert 'memory_budget' in specific_params", "in os.environ: model_name = os.environ['DTR_MODEL_NAME'] config, msg = validate_trials_config(config_dir) if", "to look for one) delete_logs() # we only save logs", "measurements \"\"\" torch.cuda.reset_max_memory_allocated() # resetting means the count should be", "specific_params.get('use_profiling', False) use_cudnn = model_util.use_cudnn(model_name) with torch.backends.cudnn.flags(enabled=use_cudnn, benchmark=use_cudnn): criterion =", "data['total_mem']*1e-6, 'memory_budget': memory_budget, # profiling (reported in nanoseconds) 'base_compute_time': data['base_compute_time']*1e-6,", "conversions now: times in ms, # memory in MB writer.writerow({", "1 most_recent = all_logs[0] # rename and move # (new", "train_ips_list.append(res['ips']) out_file = 
\"speed_results.tsv\" with open(out_file, \"a\") as fout: val_dict", "a baseline memory usage if trial_run: write_json(os.getcwd(), trial_run_outfile, { 'mem'", "start timing torch.cuda.synchronize() start_time = time.time() if use_dtr: torch.reset_profile() start.record()", "progress.set_description(f'Rep [{j}]' + '' if j > dry_run else f'Dry", "results model_name_replace_dict = { 'tv_resnet152': 'resnet152', 'tv_resnet50': 'resnet50', } train_ips_list", "inputs, we run each input as a separate process. A", "return result def timing_loop(model_name, i, config, use_dtr, specific_params, writer, trial_run=False,", "data['cost_time']*1e-6, 'rep': j - dry_run, 'input': i, **specific_params }) def", "__name__ == '__main__': invoke_main(main, 'config_dir', 'experiment_mode', 'model_name', 'input_idx', 'params_file', 'out_file',", "None for res in measurements: batch_size = res['batch_size'] train_ips_list.append(res['ips']) out_file", "any issues of memory hanging around between inputs, we run", "specific_params, filename) def delete_logs(): for log in glob.glob(os.path.join(os.getcwd(), '*.log')): os.remove(log)", "result['cuda_time'] = cuda_time else: result['cuda_time'] = -1.0 return result def", "sim_conf_filename, dict()) conf = read_json(dest_dir, sim_conf_filename) if model_name not in", "train_ips_list = [] batch_size = None for res in measurements:", "-1) # clean up after ourselves delete_logs() # do all", "= torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) # start timing torch.cuda.synchronize() start_time", "inp = gen_input(i, specific_params.get('extra_params', dict())) n_reps = specific_params.get('n_reps', config['n_reps']) if", "getting a baseline memory usage if trial_run: write_json(os.getcwd(), trial_run_outfile, {", "= use_dtr and specific_params.get('save_logs', config['save_logs']) and i == config['n_inputs'] -", "'sync_time': data['sync_time']*1e3, # pytorch's cuda elapsed time is already in", "any logs hanging around 
(so we only have to look", "(new name just appends info to the old one) batch_size", "data['remat_compute_time']*1e-6, 'search_time': data['search_time']*1e-6, 'cost_time': data['cost_time']*1e-6, 'rep': j - dry_run, 'input':", "operators-only time, tracked by DTR cuda_time = torch.compute_time() base_compute_time =", "i, config, use_dtr, specific_params, writer, trial_run=False, trial_run_outfile=None, memory_budget=-1.0): dry_run =", "the last DTR log produced in the trial (if any", "= config['dry_run'] measurements = [] print(f'Running {model_name} : {specific_params}') #", "run [{j}]') gc.collect() # Annotate where the final run starts", "all logs in advance, there should be at most one", "the old one) batch_size = specific_params['batch_size'] budget = specific_params['memory_budget'] if", "if use_dtr: # operators-only time, tracked by DTR cuda_time =", "Find the last DTR log produced in the trial (if", "specific_params['batch_size'], use_dtr=use_dtr) inp = gen_input(i, specific_params.get('extra_params', dict())) n_reps = specific_params.get('n_reps',", "cuda elapsed time is already in ms 'gpu_time': float(data['gpu_time']), #", "params, esp. for DTR specific_params = read_json(cwd, params_file) if 'DTR_MEMORY_BUDGET'", "+ '' if j > dry_run else f'Dry run [{j}]')", "the model outside of the loop, DTR's logs have shown", "main(config_dir, experiment_mode, model_name, input_idx, params_file, out_file, trial_run=False, trial_run_outfile=None): if 'DTR_MODEL_NAME'", "run each input as a separate process. 
A little ugly", "run_model(criterion, *model, *inp, optimizer=optimizer) end.record() start_sync = time.time() torch.cuda.synchronize() end_sync", "assert 'memory_budget' in specific_params if specific_params['memory_budget'] > 0: print(f'Setting budget", "{specific_params}') # remove any logs hanging around (so we only", "last DTR log produced in the trial (if any exist)", "}) write_json(dest_dir, sim_conf_filename, conf) def save_trial_log(dest_dir, sim_conf_filename, model_name, specific_params, is_baseline=False):", "-1 if use_profiling: base_compute_time = torch.base_compute_time() remat_compute_time = torch.remat_compute_time() search_time", "use_dtr: torch.toggle_log(False) del params batch_size = len(inp[0]) ips = batch_size", "process. A little ugly but effective \"\"\" import gc import", "= len(inp[0]) ips = batch_size / (end_time - start_time) result", "'resnet152', 'tv_resnet50': 'resnet50', } train_ips_list = [] batch_size = None", "config['n_inputs'] - 1 if use_dtr: torch.toggle_log(False) # whether to report", "i, **specific_params }) def main(config_dir, experiment_mode, model_name, input_idx, params_file, out_file,", "# do all the writing after the trial is over", "a separate *function scope* turned out to be the only", "any exist) and move it to the directory \"\"\" all_logs", "'DTR_MODEL_NAME' in os.environ: model_name = os.environ['DTR_MODEL_NAME'] config, msg = validate_trials_config(config_dir)", "loop, DTR's logs have shown that certain constants in the", "and specific_params.get('use_profiling', False) use_cudnn = model_util.use_cudnn(model_name) with torch.backends.cudnn.flags(enabled=use_cudnn, benchmark=use_cudnn): criterion", "{model_name} : {specific_params}') # remove any logs hanging around (so", "ips = batch_size / (end_time - start_time) result = {", "False) use_cudnn = model_util.use_cudnn(model_name) with torch.backends.cudnn.flags(enabled=use_cudnn, benchmark=use_cudnn): criterion = model_util.get_criterion(model_name)", "writing 
after the trial is over for j in range(len(measurements)):", "and specific_params.get('save_logs', config['save_logs']) and i == config['n_inputs'] - 1 if", "rename and move # (new name just appends info to", "# handle specific params, esp. for DTR specific_params = read_json(cwd,", "ugly but effective \"\"\" import gc import glob import json", "return # if we delete all logs in advance, there", "cost_time = -1 if use_profiling: base_compute_time = torch.base_compute_time() remat_compute_time =", "- start_time, 'sync_time': end_sync - start_sync, 'gpu_time': start.elapsed_time(end), 'input_mem': input_mem,", "conf[model_name] = [] conf[model_name].append({ 'name': model_util.get_model_family(model_name), 'batch_size': str(specific_params['batch_size']), 'layers': specific_params.get('layers',", "be at most one log assert len(all_logs) == 1 most_recent", "# only what's in scope, meaning only the input input_mem", "assert len(all_logs) == 1 most_recent = all_logs[0] # rename and", "constants hang around. 
Returns a dict of measurements \"\"\" torch.cuda.reset_max_memory_allocated()", "look for one) delete_logs() # we only save logs for", "open(out_file, 'a', newline='') as csvfile: writer = create_csv_writer(csvfile, specific_params) timing_loop(model_name,", "'ips': ips } if use_dtr: result['cuda_time'] = cuda_time else: result['cuda_time']", "dry_run else f'Dry run [{j}]') gc.collect() # Annotate where the", "dict of measurements \"\"\" torch.cuda.reset_max_memory_allocated() # resetting means the count", "j in progress: progress.set_description(f'Rep [{j}]' + '' if j >", "params batch_size = len(inp[0]) ips = batch_size / (end_time -", "'*.log')) if not all_logs: return # if we delete all", "1 use_dtr = (experiment_mode == 'dtr') i = int(input_idx) is_trial", "conf: conf[model_name] = [] conf[model_name].append({ 'name': model_util.get_model_family(model_name), 'batch_size': str(specific_params['batch_size']), 'layers':", "profiling (reported in nanoseconds) 'base_compute_time': data['base_compute_time']*1e-6, 'remat_compute_time': data['remat_compute_time']*1e-6, 'search_time': data['search_time']*1e-6,", "csvfile: writer = create_csv_writer(csvfile, specific_params) timing_loop(model_name, i, config, use_dtr, specific_params,", "specific_params['memory_budget'] = float(os.environ['DTR_MEMORY_BUDGET']) assert 'batch_size' in specific_params if use_dtr: assert", "start.elapsed_time(end), 'input_mem': input_mem, 'model_mem': model_mem, 'total_mem': total_mem, 'base_compute_time': base_compute_time, 'remat_compute_time':", "specific_params['memory_budget'], 'batch_size': batch_size, 'ips': np.median(train_ips_list) if train_ips_list else -1, }", "trial_run: write_json(os.getcwd(), trial_run_outfile, { 'mem' : max(map(lambda data: data['total_mem'], measurements))", "prof: run_model(criterion, *model, *inp, optimizer=optimizer) end.record() start_sync = time.time() torch.cuda.synchronize()", "}) return if save_log: save_trial_log(config['log_dest'], 
config.get('simrd_config', None), model_name, specific_params, is_baseline=specific_params['memory_budget']", "write_json(dest_dir, sim_conf_filename, dict()) conf = read_json(dest_dir, sim_conf_filename) if model_name not", "start_sync = time.time() torch.cuda.synchronize() end_sync = time.time() end_time = time.time()", "To avoid any issues of memory hanging around between inputs,", "over for j in range(len(measurements)): data = measurements[j] # do", "usage if trial_run: write_json(os.getcwd(), trial_run_outfile, { 'mem' : max(map(lambda data:", "# clean up after ourselves delete_logs() # do all the", "if we delete all logs in advance, there should be", "- start_sync, 'gpu_time': start.elapsed_time(end), 'input_mem': input_mem, 'model_mem': model_mem, 'total_mem': total_mem,", "model_mem = torch.cuda.max_memory_allocated() optimizer = torch.optim.SGD(model[0].parameters(), 1e-3, momentum=0.9, weight_decay=1e-4) start", "create_csv_writer from tqdm import tqdm import model_util def extend_simrd_config(dest_dir, sim_conf_filename,", "if j >= dry_run: measurements.append(res) # Dump results model_name_replace_dict =", "cwd = os.getcwd() # handle specific params, esp. 
for DTR", "and sim_conf_filename is not None: extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, filename)", "time, tracked by DTR cuda_time = torch.compute_time() base_compute_time = -1", "memory in MB writer.writerow({ 'time': data['time']*1e3, 'sync_time': data['sync_time']*1e3, # pytorch's", "= -1 if use_profiling: base_compute_time = torch.base_compute_time() remat_compute_time = torch.remat_compute_time()", "have shown that certain constants in the model persist between", "sim_conf_filename, model_name, specific_params, is_baseline=False): \"\"\" Find the last DTR log", "if use_profiling: base_compute_time = torch.base_compute_time() remat_compute_time = torch.remat_compute_time() search_time =", "new_name) os.rename(most_recent, filename) if is_baseline and sim_conf_filename is not None:", "not all_logs: return # if we delete all logs in", "only what's in scope, meaning only the input input_mem =", "torch.base_compute_time() remat_compute_time = torch.remat_compute_time() search_time = torch.search_time() cost_time = torch.cost_time()", "'*.log')): os.remove(log) def run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion, extra_params,", "prepare_out_file, check_file_exists from validate_config import validate_trials_config from pt_trial_util import create_csv_writer", "sim_conf_filename is not None: extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, filename) def", "str(specific_params['batch_size']), 'layers': specific_params.get('layers', model_util.get_model_layers(model_name)), 'type': model_util.get_model_type(model_name), 'log': log_name, 'has_start': True", "numpy as np import torch from common import invoke_main, read_json,", "exist) and move it to the directory \"\"\" all_logs =", "the count should be reset to # only what's in", "conf) def save_trial_log(dest_dir, sim_conf_filename, model_name, specific_params, is_baseline=False): \"\"\" Find the", 
"'base_compute_time': base_compute_time, 'remat_compute_time': remat_compute_time, 'search_time': search_time, 'cost_time': cost_time, 'batch_size': batch_size,", "-1.0 return result def timing_loop(model_name, i, config, use_dtr, specific_params, writer,", "the final input on DTR save_log = use_dtr and specific_params.get('save_logs',", "info use_profiling = use_dtr and specific_params.get('use_profiling', False) use_cudnn = model_util.use_cudnn(model_name)", "for j in range(len(measurements)): data = measurements[j] # do unit", "do unit conversions now: times in ms, # memory in", "= '{}-{}-{}-{}'.format(model_name, batch_size, budget, os.path.basename(most_recent)) filename = prepare_out_file(dest_dir, new_name) os.rename(most_recent,", "if budget < 0: budget = 'inf' new_name = '{}-{}-{}-{}'.format(model_name,", "between inputs, we run each input as a separate process.", "= torch.compute_time() base_compute_time = -1 remat_compute_time = -1 search_time =", "is not None: extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, filename) def delete_logs():", "in glob.glob(os.path.join(os.getcwd(), '*.log')): os.remove(log) def run_single_measurement(model_name, produce_model, run_model, teardown, inp,", "- start_time) result = { 'time': end_time - start_time, 'sync_time':", "start.record() # with torch.autograd.profiler.profile(use_cuda=True) as prof: run_model(criterion, *model, *inp, optimizer=optimizer)", "if use_dtr: assert 'memory_budget' in specific_params if specific_params['memory_budget'] > 0:", "might seem most reasonable to initialize the model outside of", "a single measurement of the model on the given input.", "== config['n_inputs'] - 1 if use_dtr: torch.toggle_log(False) # whether to", "= model_util.use_cudnn(model_name) with torch.backends.cudnn.flags(enabled=use_cudnn, benchmark=use_cudnn): criterion = model_util.get_criterion(model_name) produce_model, gen_input,", "total_mem = torch.cuda.max_memory_allocated() 
teardown(*model) torch.cuda.reset_max_memory_allocated() del model if use_dtr: torch.toggle_log(False)", "the given input. While it might seem most reasonable to", "in measurements: batch_size = res['batch_size'] train_ips_list.append(res['ips']) out_file = \"speed_results.tsv\" with", "ms, # memory in MB writer.writerow({ 'time': data['time']*1e3, 'sync_time': data['sync_time']*1e3,", "res = run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion, extra_params=specific_params.get('extra_params', dict()),", "os.environ: model_name = os.environ['DTR_MODEL_NAME'] config, msg = validate_trials_config(config_dir) if config", "model_util def extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, log_name): if not check_file_exists(dest_dir,", "batch_size = res['batch_size'] train_ips_list.append(res['ips']) out_file = \"speed_results.tsv\" with open(out_file, \"a\")", "0: print(f'Setting budget to {int(specific_params[\"memory_budget\"])}') torch.set_memory_budget(int(specific_params['memory_budget'])) if is_trial: timing_loop(model_name, i,", "should be reset to # only what's in scope, meaning", "at most one log assert len(all_logs) == 1 most_recent =", "model_name = os.environ['DTR_MODEL_NAME'] config, msg = validate_trials_config(config_dir) if config is", "= trial_run == 'True' if config['set_seed']: torch.manual_seed(config['seed'] + i) random.seed(config['seed']", "'inf' new_name = '{}-{}-{}-{}'.format(model_name, batch_size, budget, os.path.basename(most_recent)) filename = prepare_out_file(dest_dir,", "import tqdm import model_util def extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, log_name):", "specific_params, writer, memory_budget=specific_params.get('memory_budget', -1)) if __name__ == '__main__': invoke_main(main, 'config_dir',", "del params batch_size = len(inp[0]) ips = batch_size / (end_time", "common import invoke_main, read_json, write_json, prepare_out_file, 
check_file_exists from validate_config import", "input_mem, 'model_mem': model_mem, 'total_mem': total_mem, 'base_compute_time': base_compute_time, 'remat_compute_time': remat_compute_time, 'search_time':", "run_model, teardown, inp, criterion, extra_params=specific_params.get('extra_params', dict()), use_dtr=use_dtr, use_profiling=use_profiling) if j", "sim_conf_filename): prepare_out_file(dest_dir, sim_conf_filename) write_json(dest_dir, sim_conf_filename, dict()) conf = read_json(dest_dir, sim_conf_filename)", "specific_params.get('extra_params', dict())) n_reps = specific_params.get('n_reps', config['n_reps']) if use_profiling: torch.toggle_profile(use_profiling) progress", "only when this trial is not # for getting a", ": {specific_params}') # remove any logs hanging around (so we", "import random import time import numpy as np import torch", "config['n_reps']) if use_profiling: torch.toggle_profile(use_profiling) progress = tqdm(range(dry_run + n_reps)) for", "it to the directory \"\"\" all_logs = glob.glob(os.path.join(os.getcwd(), '*.log')) if", "os.environ: specific_params['memory_budget'] = float(os.environ['DTR_MEMORY_BUDGET']) assert 'batch_size' in specific_params if use_dtr:", "if specific_params['memory_budget'] > 0: print(f'Setting budget to {int(specific_params[\"memory_budget\"])}') torch.set_memory_budget(int(specific_params['memory_budget'])) if", "\"\"\" To avoid any issues of memory hanging around between", "'network': model_name_replace_dict.get(model_name, model_name), 'algorithm': 'dtr', 'budget': specific_params['memory_budget'], 'batch_size': batch_size, 'ips':", "[] batch_size = None for res in measurements: batch_size =", "**specific_params }) def main(config_dir, experiment_mode, model_name, input_idx, params_file, out_file, trial_run=False,", "if use_dtr: result['cuda_time'] = cuda_time else: result['cuda_time'] = -1.0 return", "'base_compute_time': data['base_compute_time']*1e-6, 'remat_compute_time': data['remat_compute_time']*1e-6, 
'search_time': data['search_time']*1e-6, 'cost_time': data['cost_time']*1e-6, 'rep': j", "import numpy as np import torch from common import invoke_main,", "write_json, prepare_out_file, check_file_exists from validate_config import validate_trials_config from pt_trial_util import", "= (experiment_mode == 'dtr') i = int(input_idx) is_trial = trial_run", "produced in the trial (if any exist) and move it", "in advance, there should be at most one log assert", "glob.glob(os.path.join(os.getcwd(), '*.log')) if not all_logs: return # if we delete", "name just appends info to the old one) batch_size =", "+ \"\\n\") print(f\"save results to {out_file}\") # write to csv", "use_dtr and specific_params.get('save_logs', config['save_logs']) and i == config['n_inputs'] - 1", "timing_loop(model_name, i, config, use_dtr, specific_params, None, True, trial_run_outfile) return with", "info to the old one) batch_size = specific_params['batch_size'] budget =", "as a separate process. A little ugly but effective \"\"\"", "logs for the final input on DTR save_log = use_dtr", "torch.search_time() cost_time = torch.cost_time() torch.reset_profile() total_mem = torch.cuda.max_memory_allocated() teardown(*model) torch.cuda.reset_max_memory_allocated()", "if use_dtr: torch.toggle_log(False) # whether to report profiling info use_profiling", "print(val_dict) fout.write(json.dumps(val_dict) + \"\\n\") print(f\"save results to {out_file}\") # write", "= all_logs[0] # rename and move # (new name just", "= res['batch_size'] train_ips_list.append(res['ips']) out_file = \"speed_results.tsv\" with open(out_file, \"a\") as", "this trial is not # for getting a baseline memory", "torch.cuda.Event(enable_timing=True) # start timing torch.cuda.synchronize() start_time = time.time() if use_dtr:", "to report profiling info use_profiling = use_dtr and specific_params.get('use_profiling', False)", "-1)) if __name__ == '__main__': invoke_main(main, 'config_dir', 'experiment_mode', 'model_name', 
'input_idx',", "specific_params['memory_budget'] > 0: print(f'Setting budget to {int(specific_params[\"memory_budget\"])}') torch.set_memory_budget(int(specific_params['memory_budget'])) if is_trial:", "the log if save_log and j == dry_run + n_reps", "== -1) # clean up after ourselves delete_logs() # do", "float(os.environ['DTR_MEMORY_BUDGET']) assert 'batch_size' in specific_params if use_dtr: assert 'memory_budget' in", "= time.time() end_time = time.time() # end timing if use_dtr:", "torch.cuda.max_memory_allocated() optimizer = torch.optim.SGD(model[0].parameters(), 1e-3, momentum=0.9, weight_decay=1e-4) start = torch.cuda.Event(enable_timing=True)", "float(data['cuda_time']) * 1e-6, 'input_mem': data['input_mem']*1e-6, 'model_mem': data['model_mem']*1e-6, 'total_mem': data['total_mem']*1e-6, 'memory_budget':", "pt_trial_util import create_csv_writer from tqdm import tqdm import model_util def", "not in conf: conf[model_name] = [] conf[model_name].append({ 'name': model_util.get_model_family(model_name), 'batch_size':", "teardown, inp, criterion, extra_params=specific_params.get('extra_params', dict()), use_dtr=use_dtr, use_profiling=use_profiling) if j >=", "specific_params, is_baseline=specific_params['memory_budget'] == -1) # clean up after ourselves delete_logs()", "use_dtr = (experiment_mode == 'dtr') i = int(input_idx) is_trial =", "0: budget = 'inf' new_name = '{}-{}-{}-{}'.format(model_name, batch_size, budget, os.path.basename(most_recent))", "'True' if config['set_seed']: torch.manual_seed(config['seed'] + i) random.seed(config['seed'] + i) cwd", "check_file_exists(dest_dir, sim_conf_filename): prepare_out_file(dest_dir, sim_conf_filename) write_json(dest_dir, sim_conf_filename, dict()) conf = read_json(dest_dir,", "= time.time() if use_dtr: torch.reset_profile() start.record() # with torch.autograd.profiler.profile(use_cuda=True) as", "and j == dry_run + n_reps - 1: torch.toggle_log(True) torch.annotate_log('START')", "a separate process. 
A little ugly but effective \"\"\" import", "as np import torch from common import invoke_main, read_json, write_json,", "cuda_time = torch.compute_time() base_compute_time = -1 remat_compute_time = -1 search_time", "run_model, teardown = model_util.prepare_model(model_name, specific_params['batch_size'], use_dtr=use_dtr) inp = gen_input(i, specific_params.get('extra_params',", "} print(val_dict) fout.write(json.dumps(val_dict) + \"\\n\") print(f\"save results to {out_file}\") #", "writer, memory_budget=specific_params.get('memory_budget', -1)) if __name__ == '__main__': invoke_main(main, 'config_dir', 'experiment_mode',", "momentum=0.9, weight_decay=1e-4) start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) # start", "input on DTR save_log = use_dtr and specific_params.get('save_logs', config['save_logs']) and", "'algorithm': 'dtr', 'budget': specific_params['memory_budget'], 'batch_size': batch_size, 'ips': np.median(train_ips_list) if train_ips_list", "import os import random import time import numpy as np", "to initialize the model outside of the loop, DTR's logs", "model if use_dtr: torch.toggle_log(False) del params batch_size = len(inp[0]) ips", "j > dry_run else f'Dry run [{j}]') gc.collect() # Annotate", "measurements[j] # do unit conversions now: times in ms, #", "filename) def delete_logs(): for log in glob.glob(os.path.join(os.getcwd(), '*.log')): os.remove(log) def", "prepare_out_file(dest_dir, sim_conf_filename) write_json(dest_dir, sim_conf_filename, dict()) conf = read_json(dest_dir, sim_conf_filename) if", "'remat_compute_time': data['remat_compute_time']*1e-6, 'search_time': data['search_time']*1e-6, 'cost_time': data['cost_time']*1e-6, 'rep': j - dry_run,", "trial_run_outfile) return with open(out_file, 'a', newline='') as csvfile: writer =", "seem most reasonable to initialize the model outside of the", "hang around. 
Returns a dict of measurements \"\"\" torch.cuda.reset_max_memory_allocated() #", "file only when this trial is not # for getting", "just appends info to the old one) batch_size = specific_params['batch_size']", "read_json(dest_dir, sim_conf_filename) if model_name not in conf: conf[model_name] = []", "in MB writer.writerow({ 'time': data['time']*1e3, 'sync_time': data['sync_time']*1e3, # pytorch's cuda", "input_mem = torch.cuda.max_memory_allocated() model = produce_model(extra_params=extra_params) params = [] for", "*function scope* turned out to be the only way to", "= -1 cost_time = -1 if use_profiling: base_compute_time = torch.base_compute_time()", "{ 'time': end_time - start_time, 'sync_time': end_sync - start_sync, 'gpu_time':", "if __name__ == '__main__': invoke_main(main, 'config_dir', 'experiment_mode', 'model_name', 'input_idx', 'params_file',", "dict()) conf = read_json(dest_dir, sim_conf_filename) if model_name not in conf:", "tqdm import model_util def extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, log_name): if", "read_json(cwd, params_file) if 'DTR_MEMORY_BUDGET' in os.environ: specific_params['memory_budget'] = float(os.environ['DTR_MEMORY_BUDGET']) assert", "}) def main(config_dir, experiment_mode, model_name, input_idx, params_file, out_file, trial_run=False, trial_run_outfile=None):", "from validate_config import validate_trials_config from pt_trial_util import create_csv_writer from tqdm", "ips } if use_dtr: result['cuda_time'] = cuda_time else: result['cuda_time'] =", "DTR specific_params = read_json(cwd, params_file) if 'DTR_MEMORY_BUDGET' in os.environ: specific_params['memory_budget']", "params_file) if 'DTR_MEMORY_BUDGET' in os.environ: specific_params['memory_budget'] = float(os.environ['DTR_MEMORY_BUDGET']) assert 'batch_size'", "model_name_replace_dict.get(model_name, model_name), 'algorithm': 'dtr', 'budget': specific_params['memory_budget'], 'batch_size': batch_size, 'ips': np.median(train_ips_list)", 
"'batch_size': str(specific_params['batch_size']), 'layers': specific_params.get('layers', model_util.get_model_layers(model_name)), 'type': model_util.get_model_type(model_name), 'log': log_name, 'has_start':", "use_profiling: torch.toggle_profile(use_profiling) progress = tqdm(range(dry_run + n_reps)) for j in", "\"speed_results.tsv\" with open(out_file, \"a\") as fout: val_dict = { 'network':", "log_name): if not check_file_exists(dest_dir, sim_conf_filename): prepare_out_file(dest_dir, sim_conf_filename) write_json(dest_dir, sim_conf_filename, dict())", "result['cuda_time'] = -1.0 return result def timing_loop(model_name, i, config, use_dtr,", "\"\"\" torch.cuda.reset_max_memory_allocated() # resetting means the count should be reset", "if use_profiling: torch.toggle_profile(use_profiling) progress = tqdm(range(dry_run + n_reps)) for j", "budget = specific_params['memory_budget'] if budget < 0: budget = 'inf'", "count should be reset to # only what's in scope,", "time.time() torch.cuda.synchronize() end_sync = time.time() end_time = time.time() # end", "remat_compute_time = -1 search_time = -1 cost_time = -1 if", "= os.environ['DTR_MODEL_NAME'] config, msg = validate_trials_config(config_dir) if config is None:", "dry_run: measurements.append(res) # Dump results model_name_replace_dict = { 'tv_resnet152': 'resnet152',", "way to prevent having those constants hang around. 
Returns a", "experiment_mode, model_name, input_idx, params_file, out_file, trial_run=False, trial_run_outfile=None): if 'DTR_MODEL_NAME' in", "config.get('simrd_config', None), model_name, specific_params, is_baseline=specific_params['memory_budget'] == -1) # clean up", "only have to look for one) delete_logs() # we only", "end_sync = time.time() end_time = time.time() # end timing if", "'DTR_MEMORY_BUDGET' in os.environ: specific_params['memory_budget'] = float(os.environ['DTR_MEMORY_BUDGET']) assert 'batch_size' in specific_params", "criterion, extra_params=specific_params.get('extra_params', dict()), use_dtr=use_dtr, use_profiling=use_profiling) if j >= dry_run: measurements.append(res)", "for res in measurements: batch_size = res['batch_size'] train_ips_list.append(res['ips']) out_file =", "save logs for the final input on DTR save_log =", "in os.environ: specific_params['memory_budget'] = float(os.environ['DTR_MEMORY_BUDGET']) assert 'batch_size' in specific_params if", "base_compute_time = -1 remat_compute_time = -1 search_time = -1 cost_time", "timing_loop(model_name, i, config, use_dtr, specific_params, writer, memory_budget=specific_params.get('memory_budget', -1)) if __name__", "hanging around between inputs, we run each input as a", "'has_start': True }) write_json(dest_dir, sim_conf_filename, conf) def save_trial_log(dest_dir, sim_conf_filename, model_name,", "trial_run == 'True' if config['set_seed']: torch.manual_seed(config['seed'] + i) random.seed(config['seed'] +", "*model, *inp, optimizer=optimizer) end.record() start_sync = time.time() torch.cuda.synchronize() end_sync =", "batch_size / (end_time - start_time) result = { 'time': end_time", "data['sync_time']*1e3, # pytorch's cuda elapsed time is already in ms", "time.time() # end timing if use_dtr: # operators-only time, tracked", "save_trial_log(config['log_dest'], config.get('simrd_config', None), model_name, specific_params, is_baseline=specific_params['memory_budget'] == -1) # clean", 
"torch.toggle_profile(use_profiling) progress = tqdm(range(dry_run + n_reps)) for j in progress:", "= os.getcwd() # handle specific params, esp. for DTR specific_params", "True, trial_run_outfile) return with open(out_file, 'a', newline='') as csvfile: writer", "in scope, meaning only the input input_mem = torch.cuda.max_memory_allocated() model", "the writing after the trial is over for j in", "# (new name just appends info to the old one)", "issues of memory hanging around between inputs, we run each", "measurements.append(res) # Dump results model_name_replace_dict = { 'tv_resnet152': 'resnet152', 'tv_resnet50':", "# Dump results model_name_replace_dict = { 'tv_resnet152': 'resnet152', 'tv_resnet50': 'resnet50',", "{out_file}\") # write to csv file only when this trial", "\"\"\" all_logs = glob.glob(os.path.join(os.getcwd(), '*.log')) if not all_logs: return #", "np import torch from common import invoke_main, read_json, write_json, prepare_out_file,", "model_name, specific_params, is_baseline=specific_params['memory_budget'] == -1) # clean up after ourselves", "torch from common import invoke_main, read_json, write_json, prepare_out_file, check_file_exists from", "torch.reset_profile() start.record() # with torch.autograd.profiler.profile(use_cuda=True) as prof: run_model(criterion, *model, *inp,", "progress = tqdm(range(dry_run + n_reps)) for j in progress: progress.set_description(f'Rep", "if model_name not in conf: conf[model_name] = [] conf[model_name].append({ 'name':", "out to be the only way to prevent having those", "config['dry_run'] measurements = [] print(f'Running {model_name} : {specific_params}') # remove", "msg = validate_trials_config(config_dir) if config is None: print(msg) return 1", "to the directory \"\"\" all_logs = glob.glob(os.path.join(os.getcwd(), '*.log')) if not", "= -1 search_time = -1 cost_time = -1 if use_profiling:", "torch.optim.SGD(model[0].parameters(), 1e-3, momentum=0.9, weight_decay=1e-4) start = 
torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True)", "= gen_input(i, specific_params.get('extra_params', dict())) n_reps = specific_params.get('n_reps', config['n_reps']) if use_profiling:", "in range(len(measurements)): data = measurements[j] # do unit conversions now:", "a dict of measurements \"\"\" torch.cuda.reset_max_memory_allocated() # resetting means the", "dry_run, 'input': i, **specific_params }) def main(config_dir, experiment_mode, model_name, input_idx,", "save_log: save_trial_log(config['log_dest'], config.get('simrd_config', None), model_name, specific_params, is_baseline=specific_params['memory_budget'] == -1) #", "time import numpy as np import torch from common import", "is not # for getting a baseline memory usage if", "if use_dtr: torch.toggle_log(False) del params batch_size = len(inp[0]) ips =", "= validate_trials_config(config_dir) if config is None: print(msg) return 1 use_dtr", "start_time, 'sync_time': end_sync - start_sync, 'gpu_time': start.elapsed_time(end), 'input_mem': input_mem, 'model_mem':", "import torch from common import invoke_main, read_json, write_json, prepare_out_file, check_file_exists", "there should be at most one log assert len(all_logs) ==", "= glob.glob(os.path.join(os.getcwd(), '*.log')) if not all_logs: return # if we", "budget = 'inf' new_name = '{}-{}-{}-{}'.format(model_name, batch_size, budget, os.path.basename(most_recent)) filename", "= tqdm(range(dry_run + n_reps)) for j in progress: progress.set_description(f'Rep [{j}]'", "'model_mem': data['model_mem']*1e-6, 'total_mem': data['total_mem']*1e-6, 'memory_budget': memory_budget, # profiling (reported in", "that certain constants in the model persist between loop iterations;", "specific_params if use_dtr: assert 'memory_budget' in specific_params if specific_params['memory_budget'] >", "all_logs: return # if we delete all logs in advance,", "'rep': j - dry_run, 'input': i, **specific_params }) def main(config_dir,", "'name': 
model_util.get_model_family(model_name), 'batch_size': str(specific_params['batch_size']), 'layers': specific_params.get('layers', model_util.get_model_layers(model_name)), 'type': model_util.get_model_type(model_name), 'log':", "specific_params, log_name): if not check_file_exists(dest_dir, sim_conf_filename): prepare_out_file(dest_dir, sim_conf_filename) write_json(dest_dir, sim_conf_filename,", "random import time import numpy as np import torch from", "use_dtr: result['cuda_time'] = cuda_time else: result['cuda_time'] = -1.0 return result", "trial is not # for getting a baseline memory usage", "= produce_model(extra_params=extra_params) params = [] for m in model: if", "if use_dtr: torch.reset_profile() start.record() # with torch.autograd.profiler.profile(use_cuda=True) as prof: run_model(criterion,", "res['batch_size'] train_ips_list.append(res['ips']) out_file = \"speed_results.tsv\" with open(out_file, \"a\") as fout:", "batch_size = None for res in measurements: batch_size = res['batch_size']", "fout.write(json.dumps(val_dict) + \"\\n\") print(f\"save results to {out_file}\") # write to", "meaning only the input input_mem = torch.cuda.max_memory_allocated() model = produce_model(extra_params=extra_params)", "reset to # only what's in scope, meaning only the", "unit conversions now: times in ms, # memory in MB", "model_util.get_criterion(model_name) produce_model, gen_input, run_model, teardown = model_util.prepare_model(model_name, specific_params['batch_size'], use_dtr=use_dtr) inp", "extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, log_name): if not check_file_exists(dest_dir, sim_conf_filename): prepare_out_file(dest_dir,", "measurements = [] print(f'Running {model_name} : {specific_params}') # remove any", "torch.autograd.profiler.profile(use_cuda=True) as prof: run_model(criterion, *model, *inp, optimizer=optimizer) end.record() start_sync =", "produce_model, gen_input, run_model, teardown = 
model_util.prepare_model(model_name, specific_params['batch_size'], use_dtr=use_dtr) inp =", "= model_util.prepare_model(model_name, specific_params['batch_size'], use_dtr=use_dtr) inp = gen_input(i, specific_params.get('extra_params', dict())) n_reps", "return if save_log: save_trial_log(config['log_dest'], config.get('simrd_config', None), model_name, specific_params, is_baseline=specific_params['memory_budget'] ==", "torch.cuda.reset_max_memory_allocated() # resetting means the count should be reset to", "elapsed time is already in ms 'gpu_time': float(data['gpu_time']), # 'cuda_time'", "validate_trials_config(config_dir) if config is None: print(msg) return 1 use_dtr =", "'sync_time': end_sync - start_sync, 'gpu_time': start.elapsed_time(end), 'input_mem': input_mem, 'model_mem': model_mem,", "n_reps)) for j in progress: progress.set_description(f'Rep [{j}]' + '' if", "'input_mem': input_mem, 'model_mem': model_mem, 'total_mem': total_mem, 'base_compute_time': base_compute_time, 'remat_compute_time': remat_compute_time,", "specific params, esp. 
for DTR specific_params = read_json(cwd, params_file) if", "'' if j > dry_run else f'Dry run [{j}]') gc.collect()", "-1 search_time = -1 cost_time = -1 if use_profiling: base_compute_time", "'dtr') i = int(input_idx) is_trial = trial_run == 'True' if", "tracked by DTR cuda_time = torch.compute_time() base_compute_time = -1 remat_compute_time", "'__main__': invoke_main(main, 'config_dir', 'experiment_mode', 'model_name', 'input_idx', 'params_file', 'out_file', 'trial_run', 'trial_run_outfile')", "produce_model(extra_params=extra_params) params = [] for m in model: if hasattr(m,", "\"\"\" This function initializes a model and performs a single", "total_mem, 'base_compute_time': base_compute_time, 'remat_compute_time': remat_compute_time, 'search_time': search_time, 'cost_time': cost_time, 'batch_size':", "data['time']*1e3, 'sync_time': data['sync_time']*1e3, # pytorch's cuda elapsed time is already", "function initializes a model and performs a single measurement of", "def run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion, extra_params, use_dtr, use_profiling):", "print(f'Running {model_name} : {specific_params}') # remove any logs hanging around", "MB writer.writerow({ 'time': data['time']*1e3, 'sync_time': data['sync_time']*1e3, # pytorch's cuda elapsed", "(so we only have to look for one) delete_logs() #", "for getting a baseline memory usage if trial_run: write_json(os.getcwd(), trial_run_outfile,", "if save_log: save_trial_log(config['log_dest'], config.get('simrd_config', None), model_name, specific_params, is_baseline=specific_params['memory_budget'] == -1)", "batch_size = len(inp[0]) ips = batch_size / (end_time - start_time)", "config['set_seed']: torch.manual_seed(config['seed'] + i) random.seed(config['seed'] + i) cwd = os.getcwd()", "each input as a separate process. 
A little ugly but", "model_util.get_model_layers(model_name)), 'type': model_util.get_model_type(model_name), 'log': log_name, 'has_start': True }) write_json(dest_dir, sim_conf_filename,", "move it to the directory \"\"\" all_logs = glob.glob(os.path.join(os.getcwd(), '*.log'))", "one log assert len(all_logs) == 1 most_recent = all_logs[0] #", "{ 'network': model_name_replace_dict.get(model_name, model_name), 'algorithm': 'dtr', 'budget': specific_params['memory_budget'], 'batch_size': batch_size,", "torch.cuda.max_memory_allocated() teardown(*model) torch.cuda.reset_max_memory_allocated() del model if use_dtr: torch.toggle_log(False) del params", "write_json(os.getcwd(), trial_run_outfile, { 'mem' : max(map(lambda data: data['total_mem'], measurements)) })", "*inp, optimizer=optimizer) end.record() start_sync = time.time() torch.cuda.synchronize() end_sync = time.time()", "config is None: print(msg) return 1 use_dtr = (experiment_mode ==", "only save logs for the final input on DTR save_log", "is_trial = trial_run == 'True' if config['set_seed']: torch.manual_seed(config['seed'] + i)", "use_dtr=use_dtr, use_profiling=use_profiling) if j >= dry_run: measurements.append(res) # Dump results", "model_name, specific_params, filename) def delete_logs(): for log in glob.glob(os.path.join(os.getcwd(), '*.log')):", "of the model on the given input. 
While it might", "'mem' : max(map(lambda data: data['total_mem'], measurements)) }) return if save_log:", "model: if hasattr(m, 'parameters'): params.extend(m.parameters()) model_mem = torch.cuda.max_memory_allocated() optimizer =", "+ i) cwd = os.getcwd() # handle specific params, esp.", "= prepare_out_file(dest_dir, new_name) os.rename(most_recent, filename) if is_baseline and sim_conf_filename is", "-1 cost_time = -1 if use_profiling: base_compute_time = torch.base_compute_time() remat_compute_time", "== 1 most_recent = all_logs[0] # rename and move #", "+ n_reps)) for j in progress: progress.set_description(f'Rep [{j}]' + ''", "res in measurements: batch_size = res['batch_size'] train_ips_list.append(res['ips']) out_file = \"speed_results.tsv\"", "not check_file_exists(dest_dir, sim_conf_filename): prepare_out_file(dest_dir, sim_conf_filename) write_json(dest_dir, sim_conf_filename, dict()) conf =", "torch.cuda.max_memory_allocated() model = produce_model(extra_params=extra_params) params = [] for m in", "def timing_loop(model_name, i, config, use_dtr, specific_params, writer, trial_run=False, trial_run_outfile=None, memory_budget=-1.0):", "data['search_time']*1e-6, 'cost_time': data['cost_time']*1e-6, 'rep': j - dry_run, 'input': i, **specific_params", "memory_budget=specific_params.get('memory_budget', -1)) if __name__ == '__main__': invoke_main(main, 'config_dir', 'experiment_mode', 'model_name',", "already in ms 'gpu_time': float(data['gpu_time']), # 'cuda_time' : float(data['cuda_time']) *", "if is_trial: timing_loop(model_name, i, config, use_dtr, specific_params, None, True, trial_run_outfile)", "for j in progress: progress.set_description(f'Rep [{j}]' + '' if j", "use_dtr, specific_params, None, True, trial_run_outfile) return with open(out_file, 'a', newline='')", "performs a single measurement of the model on the given", "= use_dtr and specific_params.get('use_profiling', False) use_cudnn = model_util.use_cudnn(model_name) with 
torch.backends.cudnn.flags(enabled=use_cudnn,", "for DTR specific_params = read_json(cwd, params_file) if 'DTR_MEMORY_BUDGET' in os.environ:", "csv file only when this trial is not # for", "loop iterations; performing these actions in a separate *function scope*", "search_time = torch.search_time() cost_time = torch.cost_time() torch.reset_profile() total_mem = torch.cuda.max_memory_allocated()", "trial_run_outfile=None): if 'DTR_MODEL_NAME' in os.environ: model_name = os.environ['DTR_MODEL_NAME'] config, msg", "time is already in ms 'gpu_time': float(data['gpu_time']), # 'cuda_time' :", "ms 'gpu_time': float(data['gpu_time']), # 'cuda_time' : float(data['cuda_time']) * 1e-6, 'input_mem':", "'cost_time': data['cost_time']*1e-6, 'rep': j - dry_run, 'input': i, **specific_params })", "effective \"\"\" import gc import glob import json import os", "logs hanging around (so we only have to look for", "is over for j in range(len(measurements)): data = measurements[j] #", "# memory in MB writer.writerow({ 'time': data['time']*1e3, 'sync_time': data['sync_time']*1e3, #", "search_time, 'cost_time': cost_time, 'batch_size': batch_size, 'ips': ips } if use_dtr:", "but effective \"\"\" import gc import glob import json import", "j in range(len(measurements)): data = measurements[j] # do unit conversions", "writer = create_csv_writer(csvfile, specific_params) timing_loop(model_name, i, config, use_dtr, specific_params, writer,", "(end_time - start_time) result = { 'time': end_time - start_time,", "of measurements \"\"\" torch.cuda.reset_max_memory_allocated() # resetting means the count should", "time.time() end_time = time.time() # end timing if use_dtr: #", "import json import os import random import time import numpy", "scope, meaning only the input input_mem = torch.cuda.max_memory_allocated() model =", "given input. 
While it might seem most reasonable to initialize", "- 1 if use_dtr: torch.toggle_log(False) # whether to report profiling", "from pt_trial_util import create_csv_writer from tqdm import tqdm import model_util", "'a', newline='') as csvfile: writer = create_csv_writer(csvfile, specific_params) timing_loop(model_name, i,", "import time import numpy as np import torch from common", "use_profiling = use_dtr and specific_params.get('use_profiling', False) use_cudnn = model_util.use_cudnn(model_name) with", "in a separate *function scope* turned out to be the", "= torch.cuda.max_memory_allocated() teardown(*model) torch.cuda.reset_max_memory_allocated() del model if use_dtr: torch.toggle_log(False) del", "return with open(out_file, 'a', newline='') as csvfile: writer = create_csv_writer(csvfile,", "log_name, 'has_start': True }) write_json(dest_dir, sim_conf_filename, conf) def save_trial_log(dest_dir, sim_conf_filename,", "extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, filename) def delete_logs(): for log in", "batch_size = specific_params['batch_size'] budget = specific_params['memory_budget'] if budget < 0:", "filename = prepare_out_file(dest_dir, new_name) os.rename(most_recent, filename) if is_baseline and sim_conf_filename", "result def timing_loop(model_name, i, config, use_dtr, specific_params, writer, trial_run=False, trial_run_outfile=None,", "print(msg) return 1 use_dtr = (experiment_mode == 'dtr') i =", "print(f'Setting budget to {int(specific_params[\"memory_budget\"])}') torch.set_memory_budget(int(specific_params['memory_budget'])) if is_trial: timing_loop(model_name, i, config,", "= \"speed_results.tsv\" with open(out_file, \"a\") as fout: val_dict = {", "\"\"\" import gc import glob import json import os import", "'layers': specific_params.get('layers', model_util.get_model_layers(model_name)), 'type': model_util.get_model_type(model_name), 'log': log_name, 'has_start': True })", "budget to 
{int(specific_params[\"memory_budget\"])}') torch.set_memory_budget(int(specific_params['memory_budget'])) if is_trial: timing_loop(model_name, i, config, use_dtr,", "model_util.prepare_model(model_name, specific_params['batch_size'], use_dtr=use_dtr) inp = gen_input(i, specific_params.get('extra_params', dict())) n_reps =", "initialize the model outside of the loop, DTR's logs have", "use_dtr, specific_params, writer, memory_budget=specific_params.get('memory_budget', -1)) if __name__ == '__main__': invoke_main(main,", "end_time - start_time, 'sync_time': end_sync - start_sync, 'gpu_time': start.elapsed_time(end), 'input_mem':", "time.time() if use_dtr: torch.reset_profile() start.record() # with torch.autograd.profiler.profile(use_cuda=True) as prof:", "} if use_dtr: result['cuda_time'] = cuda_time else: result['cuda_time'] = -1.0", "i) cwd = os.getcwd() # handle specific params, esp. for", "torch.annotate_log('START') res = run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion, extra_params=specific_params.get('extra_params',", "start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) # start timing torch.cuda.synchronize()", "in conf: conf[model_name] = [] conf[model_name].append({ 'name': model_util.get_model_family(model_name), 'batch_size': str(specific_params['batch_size']),", "for the final input on DTR save_log = use_dtr and", "little ugly but effective \"\"\" import gc import glob import", "write to csv file only when this trial is not", "1e-6, 'input_mem': data['input_mem']*1e-6, 'model_mem': data['model_mem']*1e-6, 'total_mem': data['total_mem']*1e-6, 'memory_budget': memory_budget, #", "specific_params) timing_loop(model_name, i, config, use_dtr, specific_params, writer, memory_budget=specific_params.get('memory_budget', -1)) if", "None: extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, filename) def delete_logs(): for log", "'{}-{}-{}-{}'.format(model_name, 
batch_size, budget, os.path.basename(most_recent)) filename = prepare_out_file(dest_dir, new_name) os.rename(most_recent, filename)", "advance, there should be at most one log assert len(all_logs)", "config['save_logs']) and i == config['n_inputs'] - 1 if use_dtr: torch.toggle_log(False)", "'cost_time': cost_time, 'batch_size': batch_size, 'ips': ips } if use_dtr: result['cuda_time']", "config, use_dtr, specific_params, writer, trial_run=False, trial_run_outfile=None, memory_budget=-1.0): dry_run = config['dry_run']", "the model persist between loop iterations; performing these actions in", "os.rename(most_recent, filename) if is_baseline and sim_conf_filename is not None: extend_simrd_config(dest_dir,", "turned out to be the only way to prevent having", "is_trial: timing_loop(model_name, i, config, use_dtr, specific_params, None, True, trial_run_outfile) return", "most_recent = all_logs[0] # rename and move # (new name", "1e-3, momentum=0.9, weight_decay=1e-4) start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) #", "- 1: torch.toggle_log(True) torch.annotate_log('START') res = run_single_measurement(model_name, produce_model, run_model, teardown,", "not None: extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, filename) def delete_logs(): for", "after the trial is over for j in range(len(measurements)): data", "batch_size, 'ips': np.median(train_ips_list) if train_ips_list else -1, } print(val_dict) fout.write(json.dumps(val_dict)", "optimizer = torch.optim.SGD(model[0].parameters(), 1e-3, momentum=0.9, weight_decay=1e-4) start = torch.cuda.Event(enable_timing=True) end", "having those constants hang around. 
Returns a dict of measurements", "and move # (new name just appends info to the", "do all the writing after the trial is over for", "DTR save_log = use_dtr and specific_params.get('save_logs', config['save_logs']) and i ==", "model outside of the loop, DTR's logs have shown that", "around (so we only have to look for one) delete_logs()", "del model if use_dtr: torch.toggle_log(False) del params batch_size = len(inp[0])", "certain constants in the model persist between loop iterations; performing", "remat_compute_time = torch.remat_compute_time() search_time = torch.search_time() cost_time = torch.cost_time() torch.reset_profile()", "move # (new name just appends info to the old", "create_csv_writer(csvfile, specific_params) timing_loop(model_name, i, config, use_dtr, specific_params, writer, memory_budget=specific_params.get('memory_budget', -1))", "should be at most one log assert len(all_logs) == 1", "what's in scope, meaning only the input input_mem = torch.cuda.max_memory_allocated()", "search_time = -1 cost_time = -1 if use_profiling: base_compute_time =", "< 0: budget = 'inf' new_name = '{}-{}-{}-{}'.format(model_name, batch_size, budget,", "A little ugly but effective \"\"\" import gc import glob", "model on the given input. 
While it might seem most", "Annotate where the final run starts in the log if", "and move it to the directory \"\"\" all_logs = glob.glob(os.path.join(os.getcwd(),", "f'Dry run [{j}]') gc.collect() # Annotate where the final run", "logs have shown that certain constants in the model persist", "between loop iterations; performing these actions in a separate *function", "budget < 0: budget = 'inf' new_name = '{}-{}-{}-{}'.format(model_name, batch_size,", "i, config, use_dtr, specific_params, writer, memory_budget=specific_params.get('memory_budget', -1)) if __name__ ==", "clean up after ourselves delete_logs() # do all the writing", "from tqdm import tqdm import model_util def extend_simrd_config(dest_dir, sim_conf_filename, model_name,", "logs in advance, there should be at most one log", "invoke_main, read_json, write_json, prepare_out_file, check_file_exists from validate_config import validate_trials_config from", "import create_csv_writer from tqdm import tqdm import model_util def extend_simrd_config(dest_dir,", "filename) if is_baseline and sim_conf_filename is not None: extend_simrd_config(dest_dir, sim_conf_filename,", "-1, } print(val_dict) fout.write(json.dumps(val_dict) + \"\\n\") print(f\"save results to {out_file}\")", "data['total_mem'], measurements)) }) return if save_log: save_trial_log(config['log_dest'], config.get('simrd_config', None), model_name,", "train_ips_list else -1, } print(val_dict) fout.write(json.dumps(val_dict) + \"\\n\") print(f\"save results", "scope* turned out to be the only way to prevent", "trial_run=False, trial_run_outfile=None, memory_budget=-1.0): dry_run = config['dry_run'] measurements = [] print(f'Running", "if save_log and j == dry_run + n_reps - 1:", "'memory_budget' in specific_params if specific_params['memory_budget'] > 0: print(f'Setting budget to", "sim_conf_filename, model_name, specific_params, filename) def delete_logs(): for log in glob.glob(os.path.join(os.getcwd(),", "final input on DTR save_log = use_dtr 
and specific_params.get('save_logs', config['save_logs'])", "DTR's logs have shown that certain constants in the model", "(reported in nanoseconds) 'base_compute_time': data['base_compute_time']*1e-6, 'remat_compute_time': data['remat_compute_time']*1e-6, 'search_time': data['search_time']*1e-6, 'cost_time':", "config, msg = validate_trials_config(config_dir) if config is None: print(msg) return", "to be the only way to prevent having those constants", "report profiling info use_profiling = use_dtr and specific_params.get('use_profiling', False) use_cudnn", "to {int(specific_params[\"memory_budget\"])}') torch.set_memory_budget(int(specific_params['memory_budget'])) if is_trial: timing_loop(model_name, i, config, use_dtr, specific_params,", "= [] batch_size = None for res in measurements: batch_size", "None), model_name, specific_params, is_baseline=specific_params['memory_budget'] == -1) # clean up after", "'resnet50', } train_ips_list = [] batch_size = None for res", "== 'True' if config['set_seed']: torch.manual_seed(config['seed'] + i) random.seed(config['seed'] + i)", "torch.set_memory_budget(int(specific_params['memory_budget'])) if is_trial: timing_loop(model_name, i, config, use_dtr, specific_params, None, True,", "fout: val_dict = { 'network': model_name_replace_dict.get(model_name, model_name), 'algorithm': 'dtr', 'budget':", "separate process. 
A little ugly but effective \"\"\" import gc", "actions in a separate *function scope* turned out to be", "'dtr', 'budget': specific_params['memory_budget'], 'batch_size': batch_size, 'ips': np.median(train_ips_list) if train_ips_list else", "if config['set_seed']: torch.manual_seed(config['seed'] + i) random.seed(config['seed'] + i) cwd =", "'model_mem': model_mem, 'total_mem': total_mem, 'base_compute_time': base_compute_time, 'remat_compute_time': remat_compute_time, 'search_time': search_time,", "run starts in the log if save_log and j ==", "'batch_size': batch_size, 'ips': ips } if use_dtr: result['cuda_time'] = cuda_time", "validate_trials_config from pt_trial_util import create_csv_writer from tqdm import tqdm import", "starts in the log if save_log and j == dry_run", "teardown = model_util.prepare_model(model_name, specific_params['batch_size'], use_dtr=use_dtr) inp = gen_input(i, specific_params.get('extra_params', dict()))", "timing if use_dtr: # operators-only time, tracked by DTR cuda_time", "'cuda_time' : float(data['cuda_time']) * 1e-6, 'input_mem': data['input_mem']*1e-6, 'model_mem': data['model_mem']*1e-6, 'total_mem':", "if hasattr(m, 'parameters'): params.extend(m.parameters()) model_mem = torch.cuda.max_memory_allocated() optimizer = torch.optim.SGD(model[0].parameters(),", "means the count should be reset to # only what's", "= time.time() # end timing if use_dtr: # operators-only time,", "in ms, # memory in MB writer.writerow({ 'time': data['time']*1e3, 'sync_time':", "= cuda_time else: result['cuda_time'] = -1.0 return result def timing_loop(model_name,", "== '__main__': invoke_main(main, 'config_dir', 'experiment_mode', 'model_name', 'input_idx', 'params_file', 'out_file', 'trial_run',", "= read_json(dest_dir, sim_conf_filename) if model_name not in conf: conf[model_name] =", "only way to prevent having those constants hang around. 
Returns", "initializes a model and performs a single measurement of the", "config, use_dtr, specific_params, writer, memory_budget=specific_params.get('memory_budget', -1)) if __name__ == '__main__':", "= run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion, extra_params=specific_params.get('extra_params', dict()), use_dtr=use_dtr,", "of memory hanging around between inputs, we run each input", "'input': i, **specific_params }) def main(config_dir, experiment_mode, model_name, input_idx, params_file,", "single measurement of the model on the given input. While", "model_mem, 'total_mem': total_mem, 'base_compute_time': base_compute_time, 'remat_compute_time': remat_compute_time, 'search_time': search_time, 'cost_time':", "from common import invoke_main, read_json, write_json, prepare_out_file, check_file_exists from validate_config", "run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion, extra_params=specific_params.get('extra_params', dict()), use_dtr=use_dtr, use_profiling=use_profiling)", "is_baseline=False): \"\"\" Find the last DTR log produced in the", "outside of the loop, DTR's logs have shown that certain", "= torch.cuda.Event(enable_timing=True) # start timing torch.cuda.synchronize() start_time = time.time() if", "= time.time() torch.cuda.synchronize() end_sync = time.time() end_time = time.time() #", "newline='') as csvfile: writer = create_csv_writer(csvfile, specific_params) timing_loop(model_name, i, config,", "one) delete_logs() # we only save logs for the final", "import invoke_main, read_json, write_json, prepare_out_file, check_file_exists from validate_config import validate_trials_config", "print(f\"save results to {out_file}\") # write to csv file only", "run_model, teardown, inp, criterion, extra_params, use_dtr, use_profiling): \"\"\" This function", "# do unit conversions now: times in ms, # memory", "writer, trial_run=False, trial_run_outfile=None, memory_budget=-1.0): 
dry_run = config['dry_run'] measurements = []", "prevent having those constants hang around. Returns a dict of", "the final run starts in the log if save_log and", "Dump results model_name_replace_dict = { 'tv_resnet152': 'resnet152', 'tv_resnet50': 'resnet50', }", "memory hanging around between inputs, we run each input as", "optimizer=optimizer) end.record() start_sync = time.time() torch.cuda.synchronize() end_sync = time.time() end_time", "in the trial (if any exist) and move it to", "torch.toggle_log(True) torch.annotate_log('START') res = run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion,", "'time': data['time']*1e3, 'sync_time': data['sync_time']*1e3, # pytorch's cuda elapsed time is", "we run each input as a separate process. A little", "= specific_params['memory_budget'] if budget < 0: budget = 'inf' new_name", "While it might seem most reasonable to initialize the model", "performing these actions in a separate *function scope* turned out", "specific_params, is_baseline=False): \"\"\" Find the last DTR log produced in", "'gpu_time': start.elapsed_time(end), 'input_mem': input_mem, 'model_mem': model_mem, 'total_mem': total_mem, 'base_compute_time': base_compute_time,", "for m in model: if hasattr(m, 'parameters'): params.extend(m.parameters()) model_mem =", "where the final run starts in the log if save_log", "with open(out_file, 'a', newline='') as csvfile: writer = create_csv_writer(csvfile, specific_params)", "tqdm(range(dry_run + n_reps)) for j in progress: progress.set_description(f'Rep [{j}]' +", "i == config['n_inputs'] - 1 if use_dtr: torch.toggle_log(False) # whether", "weight_decay=1e-4) start = torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) # start timing", "torch.cost_time() torch.reset_profile() total_mem = torch.cuda.max_memory_allocated() teardown(*model) torch.cuda.reset_max_memory_allocated() del model if", "1 if use_dtr: torch.toggle_log(False) # whether to report 
profiling info", "log if save_log and j == dry_run + n_reps -", "if j > dry_run else f'Dry run [{j}]') gc.collect() #", "measurements)) }) return if save_log: save_trial_log(config['log_dest'], config.get('simrd_config', None), model_name, specific_params,", "and i == config['n_inputs'] - 1 if use_dtr: torch.toggle_log(False) #", "writer.writerow({ 'time': data['time']*1e3, 'sync_time': data['sync_time']*1e3, # pytorch's cuda elapsed time", "ourselves delete_logs() # do all the writing after the trial", "Returns a dict of measurements \"\"\" torch.cuda.reset_max_memory_allocated() # resetting means", "= { 'time': end_time - start_time, 'sync_time': end_sync - start_sync,", "i, config, use_dtr, specific_params, None, True, trial_run_outfile) return with open(out_file,", "on DTR save_log = use_dtr and specific_params.get('save_logs', config['save_logs']) and i", "write_json(dest_dir, sim_conf_filename, conf) def save_trial_log(dest_dir, sim_conf_filename, model_name, specific_params, is_baseline=False): \"\"\"", "* 1e-6, 'input_mem': data['input_mem']*1e-6, 'model_mem': data['model_mem']*1e-6, 'total_mem': data['total_mem']*1e-6, 'memory_budget': memory_budget,", "dry_run + n_reps - 1: torch.toggle_log(True) torch.annotate_log('START') res = run_single_measurement(model_name,", "if trial_run: write_json(os.getcwd(), trial_run_outfile, { 'mem' : max(map(lambda data: data['total_mem'],", "use_dtr: torch.toggle_log(False) # whether to report profiling info use_profiling =", "= model_util.get_criterion(model_name) produce_model, gen_input, run_model, teardown = model_util.prepare_model(model_name, specific_params['batch_size'], use_dtr=use_dtr)", "final run starts in the log if save_log and j", "to the old one) batch_size = specific_params['batch_size'] budget = specific_params['memory_budget']", "= torch.base_compute_time() remat_compute_time = torch.remat_compute_time() search_time = torch.search_time() cost_time =", "DTR log produced in the trial (if any exist) and", 
"torch.manual_seed(config['seed'] + i) random.seed(config['seed'] + i) cwd = os.getcwd() #", "len(inp[0]) ips = batch_size / (end_time - start_time) result =", "inp, criterion, extra_params, use_dtr, use_profiling): \"\"\" This function initializes a", "i = int(input_idx) is_trial = trial_run == 'True' if config['set_seed']:", "model_util.use_cudnn(model_name) with torch.backends.cudnn.flags(enabled=use_cudnn, benchmark=use_cudnn): criterion = model_util.get_criterion(model_name) produce_model, gen_input, run_model,", "[] conf[model_name].append({ 'name': model_util.get_model_family(model_name), 'batch_size': str(specific_params['batch_size']), 'layers': specific_params.get('layers', model_util.get_model_layers(model_name)), 'type':", "dry_run = config['dry_run'] measurements = [] print(f'Running {model_name} : {specific_params}')", "data: data['total_mem'], measurements)) }) return if save_log: save_trial_log(config['log_dest'], config.get('simrd_config', None),", "now: times in ms, # memory in MB writer.writerow({ 'time':", "return 1 use_dtr = (experiment_mode == 'dtr') i = int(input_idx)", "data['base_compute_time']*1e-6, 'remat_compute_time': data['remat_compute_time']*1e-6, 'search_time': data['search_time']*1e-6, 'cost_time': data['cost_time']*1e-6, 'rep': j -", "model_util.get_model_family(model_name), 'batch_size': str(specific_params['batch_size']), 'layers': specific_params.get('layers', model_util.get_model_layers(model_name)), 'type': model_util.get_model_type(model_name), 'log': log_name,", "= torch.search_time() cost_time = torch.cost_time() torch.reset_profile() total_mem = torch.cuda.max_memory_allocated() teardown(*model)", "all_logs = glob.glob(os.path.join(os.getcwd(), '*.log')) if not all_logs: return # if", "with torch.autograd.profiler.profile(use_cuda=True) as prof: run_model(criterion, *model, *inp, optimizer=optimizer) end.record() start_sync", "delete_logs() # do all the writing after the trial is", "directory \"\"\" all_logs = 
glob.glob(os.path.join(os.getcwd(), '*.log')) if not all_logs: return", "around. Returns a dict of measurements \"\"\" torch.cuda.reset_max_memory_allocated() # resetting", "torch.toggle_log(False) del params batch_size = len(inp[0]) ips = batch_size /", "read_json, write_json, prepare_out_file, check_file_exists from validate_config import validate_trials_config from pt_trial_util", "is_baseline and sim_conf_filename is not None: extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params,", "torch.cuda.synchronize() start_time = time.time() if use_dtr: torch.reset_profile() start.record() # with", "start_sync, 'gpu_time': start.elapsed_time(end), 'input_mem': input_mem, 'model_mem': model_mem, 'total_mem': total_mem, 'base_compute_time':", "else f'Dry run [{j}]') gc.collect() # Annotate where the final", "hasattr(m, 'parameters'): params.extend(m.parameters()) model_mem = torch.cuda.max_memory_allocated() optimizer = torch.optim.SGD(model[0].parameters(), 1e-3,", "memory usage if trial_run: write_json(os.getcwd(), trial_run_outfile, { 'mem' : max(map(lambda", "not # for getting a baseline memory usage if trial_run:", "to csv file only when this trial is not #", "# 'cuda_time' : float(data['cuda_time']) * 1e-6, 'input_mem': data['input_mem']*1e-6, 'model_mem': data['model_mem']*1e-6,", "in model: if hasattr(m, 'parameters'): params.extend(m.parameters()) model_mem = torch.cuda.max_memory_allocated() optimizer", "with open(out_file, \"a\") as fout: val_dict = { 'network': model_name_replace_dict.get(model_name,", "model_name_replace_dict = { 'tv_resnet152': 'resnet152', 'tv_resnet50': 'resnet50', } train_ips_list =", "'type': model_util.get_model_type(model_name), 'log': log_name, 'has_start': True }) write_json(dest_dir, sim_conf_filename, conf)", "= 'inf' new_name = '{}-{}-{}-{}'.format(model_name, batch_size, budget, os.path.basename(most_recent)) filename =", "{ 'tv_resnet152': 'resnet152', 'tv_resnet50': 'resnet50', } train_ips_list = [] 
batch_size", "\"\"\" Find the last DTR log produced in the trial", "torch.cuda.reset_max_memory_allocated() del model if use_dtr: torch.toggle_log(False) del params batch_size =", "'ips': np.median(train_ips_list) if train_ips_list else -1, } print(val_dict) fout.write(json.dumps(val_dict) +", "'batch_size': batch_size, 'ips': np.median(train_ips_list) if train_ips_list else -1, } print(val_dict)", "conf[model_name].append({ 'name': model_util.get_model_family(model_name), 'batch_size': str(specific_params['batch_size']), 'layers': specific_params.get('layers', model_util.get_model_layers(model_name)), 'type': model_util.get_model_type(model_name),", "as fout: val_dict = { 'network': model_name_replace_dict.get(model_name, model_name), 'algorithm': 'dtr',", "{ 'mem' : max(map(lambda data: data['total_mem'], measurements)) }) return if", "torch.reset_profile() total_mem = torch.cuda.max_memory_allocated() teardown(*model) torch.cuda.reset_max_memory_allocated() del model if use_dtr:", "> 0: print(f'Setting budget to {int(specific_params[\"memory_budget\"])}') torch.set_memory_budget(int(specific_params['memory_budget'])) if is_trial: timing_loop(model_name,", "os.getcwd() # handle specific params, esp. 
for DTR specific_params =", "in the model persist between loop iterations; performing these actions", "out_file, trial_run=False, trial_run_outfile=None): if 'DTR_MODEL_NAME' in os.environ: model_name = os.environ['DTR_MODEL_NAME']", "import gc import glob import json import os import random", "'search_time': data['search_time']*1e-6, 'cost_time': data['cost_time']*1e-6, 'rep': j - dry_run, 'input': i,", "produce_model, run_model, teardown, inp, criterion, extra_params, use_dtr, use_profiling): \"\"\" This", "range(len(measurements)): data = measurements[j] # do unit conversions now: times", "torch.toggle_log(False) # whether to report profiling info use_profiling = use_dtr", "sim_conf_filename) if model_name not in conf: conf[model_name] = [] conf[model_name].append({", "base_compute_time, 'remat_compute_time': remat_compute_time, 'search_time': search_time, 'cost_time': cost_time, 'batch_size': batch_size, 'ips':", "This function initializes a model and performs a single measurement", "# we only save logs for the final input on", "start_time) result = { 'time': end_time - start_time, 'sync_time': end_sync", "= -1 remat_compute_time = -1 search_time = -1 cost_time =", "specific_params, None, True, trial_run_outfile) return with open(out_file, 'a', newline='') as", "use_profiling: base_compute_time = torch.base_compute_time() remat_compute_time = torch.remat_compute_time() search_time = torch.search_time()", "def extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, log_name): if not check_file_exists(dest_dir, sim_conf_filename):", "we only have to look for one) delete_logs() # we", "delete_logs(): for log in glob.glob(os.path.join(os.getcwd(), '*.log')): os.remove(log) def run_single_measurement(model_name, produce_model,", "inp, criterion, extra_params=specific_params.get('extra_params', dict()), use_dtr=use_dtr, use_profiling=use_profiling) if j >= dry_run:", "end_time = time.time() # end timing if use_dtr: # operators-only", "# rename 
and move # (new name just appends info", "# profiling (reported in nanoseconds) 'base_compute_time': data['base_compute_time']*1e-6, 'remat_compute_time': data['remat_compute_time']*1e-6, 'search_time':", "when this trial is not # for getting a baseline", "end.record() start_sync = time.time() torch.cuda.synchronize() end_sync = time.time() end_time =", "if 'DTR_MEMORY_BUDGET' in os.environ: specific_params['memory_budget'] = float(os.environ['DTR_MEMORY_BUDGET']) assert 'batch_size' in", "new_name = '{}-{}-{}-{}'.format(model_name, batch_size, budget, os.path.basename(most_recent)) filename = prepare_out_file(dest_dir, new_name)", "all_logs[0] # rename and move # (new name just appends", "= create_csv_writer(csvfile, specific_params) timing_loop(model_name, i, config, use_dtr, specific_params, writer, memory_budget=specific_params.get('memory_budget',", "True }) write_json(dest_dir, sim_conf_filename, conf) def save_trial_log(dest_dir, sim_conf_filename, model_name, specific_params,", "up after ourselves delete_logs() # do all the writing after", "- dry_run, 'input': i, **specific_params }) def main(config_dir, experiment_mode, model_name,", "'search_time': search_time, 'cost_time': cost_time, 'batch_size': batch_size, 'ips': ips } if", "criterion, extra_params, use_dtr, use_profiling): \"\"\" This function initializes a model", "import glob import json import os import random import time", "is already in ms 'gpu_time': float(data['gpu_time']), # 'cuda_time' : float(data['cuda_time'])", "dict())) n_reps = specific_params.get('n_reps', config['n_reps']) if use_profiling: torch.toggle_profile(use_profiling) progress =", "for log in glob.glob(os.path.join(os.getcwd(), '*.log')): os.remove(log) def run_single_measurement(model_name, produce_model, run_model,", "input input_mem = torch.cuda.max_memory_allocated() model = produce_model(extra_params=extra_params) params = []", "torch.cuda.Event(enable_timing=True) end = torch.cuda.Event(enable_timing=True) # start timing 
torch.cuda.synchronize() start_time =", "= torch.cost_time() torch.reset_profile() total_mem = torch.cuda.max_memory_allocated() teardown(*model) torch.cuda.reset_max_memory_allocated() del model", "[{j}]' + '' if j > dry_run else f'Dry run", "in specific_params if specific_params['memory_budget'] > 0: print(f'Setting budget to {int(specific_params[\"memory_budget\"])}')", "= torch.cuda.max_memory_allocated() model = produce_model(extra_params=extra_params) params = [] for m", "benchmark=use_cudnn): criterion = model_util.get_criterion(model_name) produce_model, gen_input, run_model, teardown = model_util.prepare_model(model_name,", "os.environ['DTR_MODEL_NAME'] config, msg = validate_trials_config(config_dir) if config is None: print(msg)", "end = torch.cuda.Event(enable_timing=True) # start timing torch.cuda.synchronize() start_time = time.time()", "torch.remat_compute_time() search_time = torch.search_time() cost_time = torch.cost_time() torch.reset_profile() total_mem =", "model and performs a single measurement of the model on", "(experiment_mode == 'dtr') i = int(input_idx) is_trial = trial_run ==", "# start timing torch.cuda.synchronize() start_time = time.time() if use_dtr: torch.reset_profile()", "timing_loop(model_name, i, config, use_dtr, specific_params, writer, trial_run=False, trial_run_outfile=None, memory_budget=-1.0): dry_run", "= measurements[j] # do unit conversions now: times in ms,", "out_file = \"speed_results.tsv\" with open(out_file, \"a\") as fout: val_dict =", "'input_mem': data['input_mem']*1e-6, 'model_mem': data['model_mem']*1e-6, 'total_mem': data['total_mem']*1e-6, 'memory_budget': memory_budget, # profiling", "have to look for one) delete_logs() # we only save", "= read_json(cwd, params_file) if 'DTR_MEMORY_BUDGET' in os.environ: specific_params['memory_budget'] = float(os.environ['DTR_MEMORY_BUDGET'])", "def delete_logs(): for log in glob.glob(os.path.join(os.getcwd(), '*.log')): os.remove(log) def 
run_single_measurement(model_name,", "None, True, trial_run_outfile) return with open(out_file, 'a', newline='') as csvfile:", "batch_size, budget, os.path.basename(most_recent)) filename = prepare_out_file(dest_dir, new_name) os.rename(most_recent, filename) if", "the directory \"\"\" all_logs = glob.glob(os.path.join(os.getcwd(), '*.log')) if not all_logs:", "model_name not in conf: conf[model_name] = [] conf[model_name].append({ 'name': model_util.get_model_family(model_name),", "model_name, specific_params, is_baseline=False): \"\"\" Find the last DTR log produced", "else -1, } print(val_dict) fout.write(json.dumps(val_dict) + \"\\n\") print(f\"save results to", "open(out_file, \"a\") as fout: val_dict = { 'network': model_name_replace_dict.get(model_name, model_name),", "'log': log_name, 'has_start': True }) write_json(dest_dir, sim_conf_filename, conf) def save_trial_log(dest_dir,", "hanging around (so we only have to look for one)", "== dry_run + n_reps - 1: torch.toggle_log(True) torch.annotate_log('START') res =", "dict()), use_dtr=use_dtr, use_profiling=use_profiling) if j >= dry_run: measurements.append(res) # Dump", ": max(map(lambda data: data['total_mem'], measurements)) }) return if save_log: save_trial_log(config['log_dest'],", "specific_params['memory_budget'] if budget < 0: budget = 'inf' new_name =", "params_file, out_file, trial_run=False, trial_run_outfile=None): if 'DTR_MODEL_NAME' in os.environ: model_name =", "else: result['cuda_time'] = -1.0 return result def timing_loop(model_name, i, config,", "model persist between loop iterations; performing these actions in a", "prepare_out_file(dest_dir, new_name) os.rename(most_recent, filename) if is_baseline and sim_conf_filename is not", "'gpu_time': float(data['gpu_time']), # 'cuda_time' : float(data['cuda_time']) * 1e-6, 'input_mem': data['input_mem']*1e-6,", "timing torch.cuda.synchronize() start_time = time.time() if use_dtr: torch.reset_profile() start.record() #", "times in ms, # memory in 
MB writer.writerow({ 'time': data['time']*1e3,", "def main(config_dir, experiment_mode, model_name, input_idx, params_file, out_file, trial_run=False, trial_run_outfile=None): if", "base_compute_time = torch.base_compute_time() remat_compute_time = torch.remat_compute_time() search_time = torch.search_time() cost_time", "use_dtr, specific_params, writer, trial_run=False, trial_run_outfile=None, memory_budget=-1.0): dry_run = config['dry_run'] measurements", "input_idx, params_file, out_file, trial_run=False, trial_run_outfile=None): if 'DTR_MODEL_NAME' in os.environ: model_name", "the only way to prevent having those constants hang around.", "most reasonable to initialize the model outside of the loop,", "persist between loop iterations; performing these actions in a separate", "cost_time, 'batch_size': batch_size, 'ips': ips } if use_dtr: result['cuda_time'] =", "tqdm import tqdm import model_util def extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params,", "[] for m in model: if hasattr(m, 'parameters'): params.extend(m.parameters()) model_mem", "DTR cuda_time = torch.compute_time() base_compute_time = -1 remat_compute_time = -1", "= specific_params.get('n_reps', config['n_reps']) if use_profiling: torch.toggle_profile(use_profiling) progress = tqdm(range(dry_run +", "'tv_resnet152': 'resnet152', 'tv_resnet50': 'resnet50', } train_ips_list = [] batch_size =", "1: torch.toggle_log(True) torch.annotate_log('START') res = run_single_measurement(model_name, produce_model, run_model, teardown, inp,", "max(map(lambda data: data['total_mem'], measurements)) }) return if save_log: save_trial_log(config['log_dest'], config.get('simrd_config',", "as csvfile: writer = create_csv_writer(csvfile, specific_params) timing_loop(model_name, i, config, use_dtr,", "the loop, DTR's logs have shown that certain constants in", "a model and performs a single measurement of the model", "be reset to # only what's in scope, meaning only", "i) 
random.seed(config['seed'] + i) cwd = os.getcwd() # handle specific", "log assert len(all_logs) == 1 most_recent = all_logs[0] # rename", "conf = read_json(dest_dir, sim_conf_filename) if model_name not in conf: conf[model_name]", "# operators-only time, tracked by DTR cuda_time = torch.compute_time() base_compute_time", "use_dtr and specific_params.get('use_profiling', False) use_cudnn = model_util.use_cudnn(model_name) with torch.backends.cudnn.flags(enabled=use_cudnn, benchmark=use_cudnn):", "specific_params = read_json(cwd, params_file) if 'DTR_MEMORY_BUDGET' in os.environ: specific_params['memory_budget'] =", "params = [] for m in model: if hasattr(m, 'parameters'):", "cuda_time else: result['cuda_time'] = -1.0 return result def timing_loop(model_name, i,", "= specific_params['batch_size'] budget = specific_params['memory_budget'] if budget < 0: budget", "glob import json import os import random import time import", "params.extend(m.parameters()) model_mem = torch.cuda.max_memory_allocated() optimizer = torch.optim.SGD(model[0].parameters(), 1e-3, momentum=0.9, weight_decay=1e-4)", "gc import glob import json import os import random import", "those constants hang around. Returns a dict of measurements \"\"\"", "delete all logs in advance, there should be at most", "[] print(f'Running {model_name} : {specific_params}') # remove any logs hanging", "handle specific params, esp. 
for DTR specific_params = read_json(cwd, params_file)", "sim_conf_filename, model_name, specific_params, log_name): if not check_file_exists(dest_dir, sim_conf_filename): prepare_out_file(dest_dir, sim_conf_filename)", "== 'dtr') i = int(input_idx) is_trial = trial_run == 'True'", ">= dry_run: measurements.append(res) # Dump results model_name_replace_dict = { 'tv_resnet152':", "and performs a single measurement of the model on the", "resetting means the count should be reset to # only", "the trial (if any exist) and move it to the", "m in model: if hasattr(m, 'parameters'): params.extend(m.parameters()) model_mem = torch.cuda.max_memory_allocated()", "/ (end_time - start_time) result = { 'time': end_time -", "trial (if any exist) and move it to the directory", "len(all_logs) == 1 most_recent = all_logs[0] # rename and move", "config, use_dtr, specific_params, None, True, trial_run_outfile) return with open(out_file, 'a',", "in progress: progress.set_description(f'Rep [{j}]' + '' if j > dry_run", "use_profiling=use_profiling) if j >= dry_run: measurements.append(res) # Dump results model_name_replace_dict", "} train_ips_list = [] batch_size = None for res in", "random.seed(config['seed'] + i) cwd = os.getcwd() # handle specific params,", "result = { 'time': end_time - start_time, 'sync_time': end_sync -", "save_trial_log(dest_dir, sim_conf_filename, model_name, specific_params, is_baseline=False): \"\"\" Find the last DTR", "# end timing if use_dtr: # operators-only time, tracked by", "model_name, specific_params, log_name): if not check_file_exists(dest_dir, sim_conf_filename): prepare_out_file(dest_dir, sim_conf_filename) write_json(dest_dir,", "data = measurements[j] # do unit conversions now: times in", "= int(input_idx) is_trial = trial_run == 'True' if config['set_seed']: torch.manual_seed(config['seed']", "the model on the given input. 
While it might seem", "trial is over for j in range(len(measurements)): data = measurements[j]", "memory_budget, # profiling (reported in nanoseconds) 'base_compute_time': data['base_compute_time']*1e-6, 'remat_compute_time': data['remat_compute_time']*1e-6,", "'remat_compute_time': remat_compute_time, 'search_time': search_time, 'cost_time': cost_time, 'batch_size': batch_size, 'ips': ips", "# whether to report profiling info use_profiling = use_dtr and", "use_cudnn = model_util.use_cudnn(model_name) with torch.backends.cudnn.flags(enabled=use_cudnn, benchmark=use_cudnn): criterion = model_util.get_criterion(model_name) produce_model,", "is_baseline=specific_params['memory_budget'] == -1) # clean up after ourselves delete_logs() #", "batch_size, 'ips': ips } if use_dtr: result['cuda_time'] = cuda_time else:", "= [] for m in model: if hasattr(m, 'parameters'): params.extend(m.parameters())", "end timing if use_dtr: # operators-only time, tracked by DTR", "data['model_mem']*1e-6, 'total_mem': data['total_mem']*1e-6, 'memory_budget': memory_budget, # profiling (reported in nanoseconds)", "esp. for DTR specific_params = read_json(cwd, params_file) if 'DTR_MEMORY_BUDGET' in", "check_file_exists from validate_config import validate_trials_config from pt_trial_util import create_csv_writer from", "end_sync - start_sync, 'gpu_time': start.elapsed_time(end), 'input_mem': input_mem, 'model_mem': model_mem, 'total_mem':", "specific_params, writer, trial_run=False, trial_run_outfile=None, memory_budget=-1.0): dry_run = config['dry_run'] measurements =", "on the given input. 
While it might seem most reasonable", "[{j}]') gc.collect() # Annotate where the final run starts in", "os import random import time import numpy as np import", "appends info to the old one) batch_size = specific_params['batch_size'] budget", "torch.cuda.synchronize() end_sync = time.time() end_time = time.time() # end timing", "trial_run_outfile, { 'mem' : max(map(lambda data: data['total_mem'], measurements)) }) return", "val_dict = { 'network': model_name_replace_dict.get(model_name, model_name), 'algorithm': 'dtr', 'budget': specific_params['memory_budget'],", "# write to csv file only when this trial is", "in ms 'gpu_time': float(data['gpu_time']), # 'cuda_time' : float(data['cuda_time']) * 1e-6,", "import validate_trials_config from pt_trial_util import create_csv_writer from tqdm import tqdm", "model_name, input_idx, params_file, out_file, trial_run=False, trial_run_outfile=None): if 'DTR_MODEL_NAME' in os.environ:", "is None: print(msg) return 1 use_dtr = (experiment_mode == 'dtr')", "use_profiling): \"\"\" This function initializes a model and performs a", "specific_params.get('n_reps', config['n_reps']) if use_profiling: torch.toggle_profile(use_profiling) progress = tqdm(range(dry_run + n_reps))", "to # only what's in scope, meaning only the input", "specific_params.get('layers', model_util.get_model_layers(model_name)), 'type': model_util.get_model_type(model_name), 'log': log_name, 'has_start': True }) write_json(dest_dir,", "# with torch.autograd.profiler.profile(use_cuda=True) as prof: run_model(criterion, *model, *inp, optimizer=optimizer) end.record()", "= float(os.environ['DTR_MEMORY_BUDGET']) assert 'batch_size' in specific_params if use_dtr: assert 'memory_budget'", "use_dtr, use_profiling): \"\"\" This function initializes a model and performs", "if not check_file_exists(dest_dir, sim_conf_filename): prepare_out_file(dest_dir, sim_conf_filename) write_json(dest_dir, sim_conf_filename, dict()) conf", "one) batch_size = 
specific_params['batch_size'] budget = specific_params['memory_budget'] if budget <", "extra_params, use_dtr, use_profiling): \"\"\" This function initializes a model and", "reasonable to initialize the model outside of the loop, DTR's", "shown that certain constants in the model persist between loop", "by DTR cuda_time = torch.compute_time() base_compute_time = -1 remat_compute_time =", "# remove any logs hanging around (so we only have", "= None for res in measurements: batch_size = res['batch_size'] train_ips_list.append(res['ips'])", "= batch_size / (end_time - start_time) result = { 'time':", "# pytorch's cuda elapsed time is already in ms 'gpu_time':", "'tv_resnet50': 'resnet50', } train_ips_list = [] batch_size = None for", "'total_mem': data['total_mem']*1e-6, 'memory_budget': memory_budget, # profiling (reported in nanoseconds) 'base_compute_time':", "in specific_params if use_dtr: assert 'memory_budget' in specific_params if specific_params['memory_budget']", "most one log assert len(all_logs) == 1 most_recent = all_logs[0]", "measurement of the model on the given input. 
While it", "for one) delete_logs() # we only save logs for the", "torch.backends.cudnn.flags(enabled=use_cudnn, benchmark=use_cudnn): criterion = model_util.get_criterion(model_name) produce_model, gen_input, run_model, teardown =", "float(data['gpu_time']), # 'cuda_time' : float(data['cuda_time']) * 1e-6, 'input_mem': data['input_mem']*1e-6, 'model_mem':", "these actions in a separate *function scope* turned out to", "in nanoseconds) 'base_compute_time': data['base_compute_time']*1e-6, 'remat_compute_time': data['remat_compute_time']*1e-6, 'search_time': data['search_time']*1e-6, 'cost_time': data['cost_time']*1e-6,", "int(input_idx) is_trial = trial_run == 'True' if config['set_seed']: torch.manual_seed(config['seed'] +", "import model_util def extend_simrd_config(dest_dir, sim_conf_filename, model_name, specific_params, log_name): if not", "data['input_mem']*1e-6, 'model_mem': data['model_mem']*1e-6, 'total_mem': data['total_mem']*1e-6, 'memory_budget': memory_budget, # profiling (reported", "j == dry_run + n_reps - 1: torch.toggle_log(True) torch.annotate_log('START') res", "# for getting a baseline memory usage if trial_run: write_json(os.getcwd(),", "<reponame>merrymercy/dtr-prototype<gh_stars>1-10 \"\"\" To avoid any issues of memory hanging around", "'batch_size' in specific_params if use_dtr: assert 'memory_budget' in specific_params if", "to prevent having those constants hang around. 
Returns a dict", "criterion = model_util.get_criterion(model_name) produce_model, gen_input, run_model, teardown = model_util.prepare_model(model_name, specific_params['batch_size'],", "os.remove(log) def run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion, extra_params, use_dtr,", "if 'DTR_MODEL_NAME' in os.environ: model_name = os.environ['DTR_MODEL_NAME'] config, msg =", "= torch.optim.SGD(model[0].parameters(), 1e-3, momentum=0.9, weight_decay=1e-4) start = torch.cuda.Event(enable_timing=True) end =", "'total_mem': total_mem, 'base_compute_time': base_compute_time, 'remat_compute_time': remat_compute_time, 'search_time': search_time, 'cost_time': cost_time,", "= { 'network': model_name_replace_dict.get(model_name, model_name), 'algorithm': 'dtr', 'budget': specific_params['memory_budget'], 'batch_size':", "we only save logs for the final input on DTR", "use_dtr: torch.reset_profile() start.record() # with torch.autograd.profiler.profile(use_cuda=True) as prof: run_model(criterion, *model,", "\"\\n\") print(f\"save results to {out_file}\") # write to csv file", "# resetting means the count should be reset to #", "save_log and j == dry_run + n_reps - 1: torch.toggle_log(True)", "the trial is over for j in range(len(measurements)): data =", "in the log if save_log and j == dry_run +", "produce_model, run_model, teardown, inp, criterion, extra_params=specific_params.get('extra_params', dict()), use_dtr=use_dtr, use_profiling=use_profiling) if", "teardown, inp, criterion, extra_params, use_dtr, use_profiling): \"\"\" This function initializes", "of the loop, DTR's logs have shown that certain constants", "progress: progress.set_description(f'Rep [{j}]' + '' if j > dry_run else", "whether to report profiling info use_profiling = use_dtr and specific_params.get('use_profiling',", "= [] conf[model_name].append({ 'name': model_util.get_model_family(model_name), 'batch_size': str(specific_params['batch_size']), 'layers': 
specific_params.get('layers', model_util.get_model_layers(model_name)),", "results to {out_file}\") # write to csv file only when", "input. While it might seem most reasonable to initialize the", ": float(data['cuda_time']) * 1e-6, 'input_mem': data['input_mem']*1e-6, 'model_mem': data['model_mem']*1e-6, 'total_mem': data['total_mem']*1e-6,", "# if we delete all logs in advance, there should", "'time': end_time - start_time, 'sync_time': end_sync - start_sync, 'gpu_time': start.elapsed_time(end),", "json import os import random import time import numpy as", "= [] print(f'Running {model_name} : {specific_params}') # remove any logs", "only the input input_mem = torch.cuda.max_memory_allocated() model = produce_model(extra_params=extra_params) params", "old one) batch_size = specific_params['batch_size'] budget = specific_params['memory_budget'] if budget", "if train_ips_list else -1, } print(val_dict) fout.write(json.dumps(val_dict) + \"\\n\") print(f\"save", "= { 'tv_resnet152': 'resnet152', 'tv_resnet50': 'resnet50', } train_ips_list = []", "around between inputs, we run each input as a separate", "with torch.backends.cudnn.flags(enabled=use_cudnn, benchmark=use_cudnn): criterion = model_util.get_criterion(model_name) produce_model, gen_input, run_model, teardown", "j - dry_run, 'input': i, **specific_params }) def main(config_dir, experiment_mode,", "delete_logs() # we only save logs for the final input", "'budget': specific_params['memory_budget'], 'batch_size': batch_size, 'ips': np.median(train_ips_list) if train_ips_list else -1,", "remove any logs hanging around (so we only have to", "os.path.basename(most_recent)) filename = prepare_out_file(dest_dir, new_name) os.rename(most_recent, filename) if is_baseline and", "log in glob.glob(os.path.join(os.getcwd(), '*.log')): os.remove(log) def run_single_measurement(model_name, produce_model, run_model, teardown,", "trial_run=False, trial_run_outfile=None): if 'DTR_MODEL_NAME' in os.environ: model_name = 
os.environ['DTR_MODEL_NAME'] config,", "start_time = time.time() if use_dtr: torch.reset_profile() start.record() # with torch.autograd.profiler.profile(use_cuda=True)", "nanoseconds) 'base_compute_time': data['base_compute_time']*1e-6, 'remat_compute_time': data['remat_compute_time']*1e-6, 'search_time': data['search_time']*1e-6, 'cost_time': data['cost_time']*1e-6, 'rep':", "if is_baseline and sim_conf_filename is not None: extend_simrd_config(dest_dir, sim_conf_filename, model_name,", "{int(specific_params[\"memory_budget\"])}') torch.set_memory_budget(int(specific_params['memory_budget'])) if is_trial: timing_loop(model_name, i, config, use_dtr, specific_params, None,", "n_reps - 1: torch.toggle_log(True) torch.annotate_log('START') res = run_single_measurement(model_name, produce_model, run_model,", "budget, os.path.basename(most_recent)) filename = prepare_out_file(dest_dir, new_name) os.rename(most_recent, filename) if is_baseline", "np.median(train_ips_list) if train_ips_list else -1, } print(val_dict) fout.write(json.dumps(val_dict) + \"\\n\")", "if config is None: print(msg) return 1 use_dtr = (experiment_mode", "= torch.cuda.max_memory_allocated() optimizer = torch.optim.SGD(model[0].parameters(), 1e-3, momentum=0.9, weight_decay=1e-4) start =", "log produced in the trial (if any exist) and move", "None: print(msg) return 1 use_dtr = (experiment_mode == 'dtr') i", "model_util.get_model_type(model_name), 'log': log_name, 'has_start': True }) write_json(dest_dir, sim_conf_filename, conf) def", "j >= dry_run: measurements.append(res) # Dump results model_name_replace_dict = {", "if not all_logs: return # if we delete all logs", "baseline memory usage if trial_run: write_json(os.getcwd(), trial_run_outfile, { 'mem' :", "sim_conf_filename, conf) def save_trial_log(dest_dir, sim_conf_filename, model_name, specific_params, is_baseline=False): \"\"\" Find", "-1 remat_compute_time = -1 search_time = -1 cost_time = -1", "gen_input, run_model, teardown = 
model_util.prepare_model(model_name, specific_params['batch_size'], use_dtr=use_dtr) inp = gen_input(i,", "be the only way to prevent having those constants hang", "we delete all logs in advance, there should be at", "trial_run_outfile=None, memory_budget=-1.0): dry_run = config['dry_run'] measurements = [] print(f'Running {model_name}", "constants in the model persist between loop iterations; performing these", "model = produce_model(extra_params=extra_params) params = [] for m in model:", "extra_params=specific_params.get('extra_params', dict()), use_dtr=use_dtr, use_profiling=use_profiling) if j >= dry_run: measurements.append(res) #", "cost_time = torch.cost_time() torch.reset_profile() total_mem = torch.cuda.max_memory_allocated() teardown(*model) torch.cuda.reset_max_memory_allocated() del", "glob.glob(os.path.join(os.getcwd(), '*.log')): os.remove(log) def run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion,", "= torch.remat_compute_time() search_time = torch.search_time() cost_time = torch.cost_time() torch.reset_profile() total_mem", "teardown(*model) torch.cuda.reset_max_memory_allocated() del model if use_dtr: torch.toggle_log(False) del params batch_size", "'parameters'): params.extend(m.parameters()) model_mem = torch.cuda.max_memory_allocated() optimizer = torch.optim.SGD(model[0].parameters(), 1e-3, momentum=0.9,", "remat_compute_time, 'search_time': search_time, 'cost_time': cost_time, 'batch_size': batch_size, 'ips': ips }", "> dry_run else f'Dry run [{j}]') gc.collect() # Annotate where", "model_name), 'algorithm': 'dtr', 'budget': specific_params['memory_budget'], 'batch_size': batch_size, 'ips': np.median(train_ips_list) if", "input as a separate process. 
A little ugly but effective", "to {out_file}\") # write to csv file only when this", "after ourselves delete_logs() # do all the writing after the", "iterations; performing these actions in a separate *function scope* turned", "sim_conf_filename) write_json(dest_dir, sim_conf_filename, dict()) conf = read_json(dest_dir, sim_conf_filename) if model_name", "run_single_measurement(model_name, produce_model, run_model, teardown, inp, criterion, extra_params, use_dtr, use_profiling): \"\"\"", "separate *function scope* turned out to be the only way", "measurements: batch_size = res['batch_size'] train_ips_list.append(res['ips']) out_file = \"speed_results.tsv\" with open(out_file,", "= -1.0 return result def timing_loop(model_name, i, config, use_dtr, specific_params,", "def save_trial_log(dest_dir, sim_conf_filename, model_name, specific_params, is_baseline=False): \"\"\" Find the last", "+ i) random.seed(config['seed'] + i) cwd = os.getcwd() # handle", "use_dtr: assert 'memory_budget' in specific_params if specific_params['memory_budget'] > 0: print(f'Setting", "specific_params.get('save_logs', config['save_logs']) and i == config['n_inputs'] - 1 if use_dtr:", "torch.compute_time() base_compute_time = -1 remat_compute_time = -1 search_time = -1", "(if any exist) and move it to the directory \"\"\"", "the input input_mem = torch.cuda.max_memory_allocated() model = produce_model(extra_params=extra_params) params =", "memory_budget=-1.0): dry_run = config['dry_run'] measurements = [] print(f'Running {model_name} :", "'memory_budget': memory_budget, # profiling (reported in nanoseconds) 'base_compute_time': data['base_compute_time']*1e-6, 'remat_compute_time':", "specific_params['batch_size'] budget = specific_params['memory_budget'] if budget < 0: budget =", "gc.collect() # Annotate where the final run starts in the", "+ n_reps - 1: torch.toggle_log(True) torch.annotate_log('START') res = run_single_measurement(model_name, produce_model,", "specific_params if 
specific_params['memory_budget'] > 0: print(f'Setting budget to {int(specific_params[\"memory_budget\"])}') torch.set_memory_budget(int(specific_params['memory_budget']))", "it might seem most reasonable to initialize the model outside", "all the writing after the trial is over for j", "use_dtr=use_dtr) inp = gen_input(i, specific_params.get('extra_params', dict())) n_reps = specific_params.get('n_reps', config['n_reps'])" ]
[ "http://www.pdb.org/pdb/files/%s.pdb | cat > %s'%(pdbId.upper(), outName) ## print(cmd) ## call(cmd,", "from subprocess import call try: from downloadPdb import downloadPDB except", "cmd= 'wget -qO- http://www.pdb.org/pdb/files/%s.pdb | cat > %s'%(pdbId.upper(), outName) ##", "#### wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz |zcat > 1i1q.pdb ## outName= os.path.join(pdbOutPath,pdbId+'.pdb')", "| cat > %s'%(pdbId.upper(), outName) ## print(cmd) ## call(cmd, shell=", "outPath, useBioUnit): with open(fname) as f: for line in f:", "import call try: from downloadPdb import downloadPDB except ImportError: from", "> %s'%(pdbId.lower(), outName) ## else: ## cmd= 'wget -qO- http://www.pdb.org/pdb/files/%s.pdb", "1i1q.pdb ## outName= os.path.join(pdbOutPath,pdbId+'.pdb') ## if not os.path.isfile(outName): ## if", "## if not os.path.isfile(outName): ## if useBioUnit: ## cmd= 'wget", "for line in f: pdbId= line.split()[0] print(pdbId) downloadPDB(pdbId, outPath, bioUnit=", "else None) if __name__==\"__main__\": if len(sys.argv)==3: pdbListFile= os.path.abspath(os.path.expanduser(sys.argv[1])) outPath= os.path.abspath(os.path.expanduser(sys.argv[2]))", "import sys, os from subprocess import call try: from downloadPdb", "os from subprocess import call try: from downloadPdb import downloadPDB", "downloadPDB except ImportError: from .downloadPdb import downloadPDB pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\" outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\" USE_BIO_UNIT=False", "pdbId= line.split()[0] print(pdbId) downloadPDB(pdbId, outPath, bioUnit= 0 if useBioUnit else", "if len(sys.argv)==3: pdbListFile= os.path.abspath(os.path.expanduser(sys.argv[1])) outPath= os.path.abspath(os.path.expanduser(sys.argv[2])) print( pdbListFile, outPath) downloadInFile(pdbListFile,", "downloadPdb import downloadPDB except ImportError: from .downloadPdb import downloadPDB 
pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\"", "except ImportError: from .downloadPdb import downloadPDB pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\" outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\" USE_BIO_UNIT=False ##def", "ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz o ya descomprimido #### wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz |zcat >", "descargar pdb: wget ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz o ya descomprimido #### wget -qO-", "len(sys.argv)==3: pdbListFile= os.path.abspath(os.path.expanduser(sys.argv[1])) outPath= os.path.abspath(os.path.expanduser(sys.argv[2])) print( pdbListFile, outPath) downloadInFile(pdbListFile, outPath,", "pdbOutPath, useBioUnit): #### descargar pdb: wget ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz o ya descomprimido", "if useBioUnit else None) if __name__==\"__main__\": if len(sys.argv)==3: pdbListFile= os.path.abspath(os.path.expanduser(sys.argv[1]))", "os.path.isfile(outName): ## if useBioUnit: ## cmd= 'wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz |zcat", "## else: ## cmd= 'wget -qO- http://www.pdb.org/pdb/files/%s.pdb | cat >", "if not os.path.isfile(outName): ## if useBioUnit: ## cmd= 'wget -qO-", "outName) ## else: ## cmd= 'wget -qO- http://www.pdb.org/pdb/files/%s.pdb | cat", "def downloadInFile(fname, outPath, useBioUnit): with open(fname) as f: for line", "-qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz |zcat > %s'%(pdbId.lower(), outName) ## else: ## cmd=", ".downloadPdb import downloadPDB pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\" outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\" USE_BIO_UNIT=False ##def downloadPDB(pdbId, pdbOutPath, useBioUnit):", "|zcat 
> %s'%(pdbId.lower(), outName) ## else: ## cmd= 'wget -qO-", "import downloadPDB except ImportError: from .downloadPdb import downloadPDB pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\" outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\"", "outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\" USE_BIO_UNIT=False ##def downloadPDB(pdbId, pdbOutPath, useBioUnit): #### descargar pdb: wget", "cat > %s'%(pdbId.upper(), outName) ## print(cmd) ## call(cmd, shell= True)", "0 if useBioUnit else None) if __name__==\"__main__\": if len(sys.argv)==3: pdbListFile=", "'wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz |zcat > %s'%(pdbId.lower(), outName) ## else: ##", "outPath, bioUnit= 0 if useBioUnit else None) if __name__==\"__main__\": if", "downloadInFile(fname, outPath, useBioUnit): with open(fname) as f: for line in", "useBioUnit: ## cmd= 'wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz |zcat > %s'%(pdbId.lower(), outName)", "from .downloadPdb import downloadPDB pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\" outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\" USE_BIO_UNIT=False ##def downloadPDB(pdbId, pdbOutPath,", "None) if __name__==\"__main__\": if len(sys.argv)==3: pdbListFile= os.path.abspath(os.path.expanduser(sys.argv[1])) outPath= os.path.abspath(os.path.expanduser(sys.argv[2])) print(", "> 1i1q.pdb ## outName= os.path.join(pdbOutPath,pdbId+'.pdb') ## if not os.path.isfile(outName): ##", "open(fname) as f: for line in f: pdbId= line.split()[0] print(pdbId)", "wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz |zcat > 1i1q.pdb ## outName= os.path.join(pdbOutPath,pdbId+'.pdb') ##", "-qO- http://www.pdb.org/pdb/files/%s.pdb | cat > %s'%(pdbId.upper(), outName) ## print(cmd) ##", "## call(cmd, shell= True) def 
downloadInFile(fname, outPath, useBioUnit): with open(fname)", "##def downloadPDB(pdbId, pdbOutPath, useBioUnit): #### descargar pdb: wget ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz o", "f: for line in f: pdbId= line.split()[0] print(pdbId) downloadPDB(pdbId, outPath,", "## if useBioUnit: ## cmd= 'wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz |zcat >", "#### descargar pdb: wget ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz o ya descomprimido #### wget", "pdbListFile= os.path.abspath(os.path.expanduser(sys.argv[1])) outPath= os.path.abspath(os.path.expanduser(sys.argv[2])) print( pdbListFile, outPath) downloadInFile(pdbListFile, outPath, USE_BIO_UNIT)", "with open(fname) as f: for line in f: pdbId= line.split()[0]", "f: pdbId= line.split()[0] print(pdbId) downloadPDB(pdbId, outPath, bioUnit= 0 if useBioUnit", "ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz |zcat > 1i1q.pdb ## outName= os.path.join(pdbOutPath,pdbId+'.pdb') ## if not", "downloadPDB(pdbId, pdbOutPath, useBioUnit): #### descargar pdb: wget ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz o ya", "'wget -qO- http://www.pdb.org/pdb/files/%s.pdb | cat > %s'%(pdbId.upper(), outName) ## print(cmd)", "useBioUnit): with open(fname) as f: for line in f: pdbId=", "print(cmd) ## call(cmd, shell= True) def downloadInFile(fname, outPath, useBioUnit): with", "wget ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz o ya descomprimido #### wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz |zcat", "line.split()[0] print(pdbId) downloadPDB(pdbId, outPath, bioUnit= 0 if useBioUnit else None)", "ImportError: from .downloadPdb import downloadPDB pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\" outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\" USE_BIO_UNIT=False ##def 
downloadPDB(pdbId,", "pdb: wget ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz o ya descomprimido #### wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz", "ya descomprimido #### wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz |zcat > 1i1q.pdb ##", "os.path.join(pdbOutPath,pdbId+'.pdb') ## if not os.path.isfile(outName): ## if useBioUnit: ## cmd=", "sys, os from subprocess import call try: from downloadPdb import", "else: ## cmd= 'wget -qO- http://www.pdb.org/pdb/files/%s.pdb | cat > %s'%(pdbId.upper(),", "## print(cmd) ## call(cmd, shell= True) def downloadInFile(fname, outPath, useBioUnit):", "if useBioUnit: ## cmd= 'wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz |zcat > %s'%(pdbId.lower(),", "%s'%(pdbId.lower(), outName) ## else: ## cmd= 'wget -qO- http://www.pdb.org/pdb/files/%s.pdb |", "subprocess import call try: from downloadPdb import downloadPDB except ImportError:", "as f: for line in f: pdbId= line.split()[0] print(pdbId) downloadPDB(pdbId,", "-qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz |zcat > 1i1q.pdb ## outName= os.path.join(pdbOutPath,pdbId+'.pdb') ## if", "print(pdbId) downloadPDB(pdbId, outPath, bioUnit= 0 if useBioUnit else None) if", "if __name__==\"__main__\": if len(sys.argv)==3: pdbListFile= os.path.abspath(os.path.expanduser(sys.argv[1])) outPath= os.path.abspath(os.path.expanduser(sys.argv[2])) print( pdbListFile,", "USE_BIO_UNIT=False ##def downloadPDB(pdbId, pdbOutPath, useBioUnit): #### descargar pdb: wget ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz", "in f: pdbId= line.split()[0] print(pdbId) downloadPDB(pdbId, outPath, bioUnit= 0 if", "line in f: pdbId= line.split()[0] print(pdbId) downloadPDB(pdbId, outPath, bioUnit= 0", "bioUnit= 0 if useBioUnit else None) if __name__==\"__main__\": if len(sys.argv)==3:", 
"ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz |zcat > %s'%(pdbId.lower(), outName) ## else: ## cmd= 'wget", "descomprimido #### wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz |zcat > 1i1q.pdb ## outName=", "outName) ## print(cmd) ## call(cmd, shell= True) def downloadInFile(fname, outPath,", "__name__==\"__main__\": if len(sys.argv)==3: pdbListFile= os.path.abspath(os.path.expanduser(sys.argv[1])) outPath= os.path.abspath(os.path.expanduser(sys.argv[2])) print( pdbListFile, outPath)", "o ya descomprimido #### wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz |zcat > 1i1q.pdb", "try: from downloadPdb import downloadPDB except ImportError: from .downloadPdb import", "## outName= os.path.join(pdbOutPath,pdbId+'.pdb') ## if not os.path.isfile(outName): ## if useBioUnit:", "from downloadPdb import downloadPDB except ImportError: from .downloadPdb import downloadPDB", "shell= True) def downloadInFile(fname, outPath, useBioUnit): with open(fname) as f:", "import downloadPDB pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\" outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\" USE_BIO_UNIT=False ##def downloadPDB(pdbId, pdbOutPath, useBioUnit): ####", "downloadPDB pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\" outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\" USE_BIO_UNIT=False ##def downloadPDB(pdbId, pdbOutPath, useBioUnit): #### descargar", "True) def downloadInFile(fname, outPath, useBioUnit): with open(fname) as f: for", "not os.path.isfile(outName): ## if useBioUnit: ## cmd= 'wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz", "> %s'%(pdbId.upper(), outName) ## print(cmd) ## call(cmd, shell= True) def", "cmd= 'wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz |zcat > %s'%(pdbId.lower(), outName) 
## else:", "pdbListFile=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/117_dimers_list.tsv\" outPath=\"/home/rsanchez/Tesis/rriPredMethod/data/joanDimers/pdbFiles/rawPDBs\" USE_BIO_UNIT=False ##def downloadPDB(pdbId, pdbOutPath, useBioUnit): #### descargar pdb:", "|zcat > 1i1q.pdb ## outName= os.path.join(pdbOutPath,pdbId+'.pdb') ## if not os.path.isfile(outName):", "outName= os.path.join(pdbOutPath,pdbId+'.pdb') ## if not os.path.isfile(outName): ## if useBioUnit: ##", "## cmd= 'wget -qO- http://www.pdb.org/pdb/files/%s.pdb | cat > %s'%(pdbId.upper(), outName)", "useBioUnit): #### descargar pdb: wget ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/1i1q.pdb2.gz o ya descomprimido ####", "%s'%(pdbId.upper(), outName) ## print(cmd) ## call(cmd, shell= True) def downloadInFile(fname,", "downloadPDB(pdbId, outPath, bioUnit= 0 if useBioUnit else None) if __name__==\"__main__\":", "call(cmd, shell= True) def downloadInFile(fname, outPath, useBioUnit): with open(fname) as", "## cmd= 'wget -qO- ftp://ftp.wwpdb.org/pub/pdb/data/biounit/coordinates/all/%s.pdb1.gz |zcat > %s'%(pdbId.lower(), outName) ##", "useBioUnit else None) if __name__==\"__main__\": if len(sys.argv)==3: pdbListFile= os.path.abspath(os.path.expanduser(sys.argv[1])) outPath=", "call try: from downloadPdb import downloadPDB except ImportError: from .downloadPdb" ]
[ "log the query and displayed results support.log_matches(querystring, offset, pagesize, matches)", "xapian.ValueCountMatchSpy(1) enquire.add_matchspy(spy) for match in enquire.get_mset(offset, pagesize, 100): fields =", "Fetch and display the spy values for facet in spy.values():", "queryparser = xapian.QueryParser() queryparser.set_stemmer(xapian.Stem(\"en\")) queryparser.set_stemming_strategy(queryparser.STEM_SOME) queryparser.add_prefix(\"title\", \"S\") queryparser.add_prefix(\"description\", \"XD\") #", "each match matches = [] ### Start of example code.", "queryparser.set_stemming_strategy(queryparser.STEM_SOME) queryparser.add_prefix(\"title\", \"S\") queryparser.add_prefix(\"description\", \"XD\") # And parse the query", "query query = queryparser.parse_query(querystring) # Use an Enquire object on", "search. db = xapian.Database(dbpath) # Set up a QueryParser with", "+ 1, 'docid': match.docid, 'title': fields.get('TITLE', u''), }) matches.append(match.docid) #", "to run the query enquire = xapian.Enquire(db) enquire.set_query(query) # And", "# Use an Enquire object on the database to run", "stemmer and suitable prefixes queryparser = xapian.QueryParser() queryparser.set_stemmer(xapian.Stem(\"en\")) queryparser.set_stemming_strategy(queryparser.STEM_SOME) queryparser.add_prefix(\"title\",", "3: print(\"Usage: %s DBPATH QUERYTERM...\" % sys.argv[0]) sys.exit(1) search(dbpath =", "matches) ### End of example code. if len(sys.argv) < 3:", "QUERYTERM...\" % sys.argv[0]) sys.exit(1) search(dbpath = sys.argv[1], querystring = \"", "# offset - defines starting point within result set #", "database to run the query enquire = xapian.Enquire(db) enquire.set_query(query) #", "values for facet in spy.values(): print(\"Facet: %(term)s; count: %(count)i\" %", "[] ### Start of example code. 
# Set up a", "%(title)s\" % { 'rank': match.rank + 1, 'docid': match.docid, 'title':", ": facet.termfreq }) # Finally, make sure we log the", "fields = json.loads(match.document.get_data().decode('utf8')) print(u\"%(rank)i: #%(docid)3.3i %(title)s\" % { 'rank': match.rank", "queryparser.add_prefix(\"title\", \"S\") queryparser.add_prefix(\"description\", \"XD\") # And parse the query query", "### Start of example code. # Set up a spy", "json.loads(match.document.get_data().decode('utf8')) print(u\"%(rank)i: #%(docid)3.3i %(title)s\" % { 'rank': match.rank + 1,", "#!/usr/bin/env python import json import sys import xapian import support", "on the database to run the query enquire = xapian.Enquire(db)", "matches.append(match.docid) # Fetch and display the spy values for facet", "count: %(count)i\" % { 'term' : facet.term.decode('utf-8'), 'count' : facet.termfreq", "match.rank + 1, 'docid': match.docid, 'title': fields.get('TITLE', u''), }) matches.append(match.docid)", "}) # Finally, make sure we log the query and", "queryparser.add_prefix(\"description\", \"XD\") # And parse the query query = queryparser.parse_query(querystring)", "querystring, offset=0, pagesize=10): # offset - defines starting point within", "up a spy to inspect the MAKER value at slot", "queryparser.parse_query(querystring) # Use an Enquire object on the database to", "'term' : facet.term.decode('utf-8'), 'count' : facet.termfreq }) # Finally, make", "offset=0, pagesize=10): # offset - defines starting point within result", "- defines starting point within result set # pagesize -", "of example code. 
# Set up a spy to inspect", "= xapian.Database(dbpath) # Set up a QueryParser with a stemmer", "enquire.get_mset(offset, pagesize, 100): fields = json.loads(match.document.get_data().decode('utf8')) print(u\"%(rank)i: #%(docid)3.3i %(title)s\" %", "facet.termfreq }) # Finally, make sure we log the query", "}) matches.append(match.docid) # Fetch and display the spy values for", "up a QueryParser with a stemmer and suitable prefixes queryparser", "object on the database to run the query enquire =", "code. # Set up a spy to inspect the MAKER", "with a stemmer and suitable prefixes queryparser = xapian.QueryParser() queryparser.set_stemmer(xapian.Stem(\"en\"))", "Finally, make sure we log the query and displayed results", "spy = xapian.ValueCountMatchSpy(1) enquire.add_matchspy(spy) for match in enquire.get_mset(offset, pagesize, 100):", "# And parse the query query = queryparser.parse_query(querystring) # Use", "1, 'docid': match.docid, 'title': fields.get('TITLE', u''), }) matches.append(match.docid) # Fetch", "print(u\"%(rank)i: #%(docid)3.3i %(title)s\" % { 'rank': match.rank + 1, 'docid':", "# pagesize - defines number of records to retrieve #", "QueryParser with a stemmer and suitable prefixes queryparser = xapian.QueryParser()", "the spy values for facet in spy.values(): print(\"Facet: %(term)s; count:", "at slot 1 spy = xapian.ValueCountMatchSpy(1) enquire.add_matchspy(spy) for match in", "out something about each match matches = [] ### Start", "pagesize - defines number of records to retrieve # Open", "import sys import xapian import support def search(dbpath, querystring, offset=0,", "= queryparser.parse_query(querystring) # Use an Enquire object on the database", "the database to run the query enquire = xapian.Enquire(db) enquire.set_query(query)", "if len(sys.argv) < 3: print(\"Usage: %s DBPATH QUERYTERM...\" % sys.argv[0])", "retrieve # Open the database we're going to search. 
db", "= json.loads(match.document.get_data().decode('utf8')) print(u\"%(rank)i: #%(docid)3.3i %(title)s\" % { 'rank': match.rank +", "facet.term.decode('utf-8'), 'count' : facet.termfreq }) # Finally, make sure we", "within result set # pagesize - defines number of records", "\"S\") queryparser.add_prefix(\"description\", \"XD\") # And parse the query query =", "the query query = queryparser.parse_query(querystring) # Use an Enquire object", "= [] ### Start of example code. # Set up", "'rank': match.rank + 1, 'docid': match.docid, 'title': fields.get('TITLE', u''), })", "for match in enquire.get_mset(offset, pagesize, 100): fields = json.loads(match.document.get_data().decode('utf8')) print(u\"%(rank)i:", "display the spy values for facet in spy.values(): print(\"Facet: %(term)s;", "in spy.values(): print(\"Facet: %(term)s; count: %(count)i\" % { 'term' :", "of example code. if len(sys.argv) < 3: print(\"Usage: %s DBPATH", "suitable prefixes queryparser = xapian.QueryParser() queryparser.set_stemmer(xapian.Stem(\"en\")) queryparser.set_stemming_strategy(queryparser.STEM_SOME) queryparser.add_prefix(\"title\", \"S\") queryparser.add_prefix(\"description\",", "of records to retrieve # Open the database we're going", "example code. # Set up a spy to inspect the", "%s DBPATH QUERYTERM...\" % sys.argv[0]) sys.exit(1) search(dbpath = sys.argv[1], querystring", "the MAKER value at slot 1 spy = xapian.ValueCountMatchSpy(1) enquire.add_matchspy(spy)", "value at slot 1 spy = xapian.ValueCountMatchSpy(1) enquire.add_matchspy(spy) for match", "Open the database we're going to search. 
db = xapian.Database(dbpath)", "spy values for facet in spy.values(): print(\"Facet: %(term)s; count: %(count)i\"", "# And print out something about each match matches =", "prefixes queryparser = xapian.QueryParser() queryparser.set_stemmer(xapian.Stem(\"en\")) queryparser.set_stemming_strategy(queryparser.STEM_SOME) queryparser.add_prefix(\"title\", \"S\") queryparser.add_prefix(\"description\", \"XD\")", "parse the query query = queryparser.parse_query(querystring) # Use an Enquire", "'docid': match.docid, 'title': fields.get('TITLE', u''), }) matches.append(match.docid) # Fetch and", "import xapian import support def search(dbpath, querystring, offset=0, pagesize=10): #", "sys import xapian import support def search(dbpath, querystring, offset=0, pagesize=10):", "search(dbpath, querystring, offset=0, pagesize=10): # offset - defines starting point", "we're going to search. db = xapian.Database(dbpath) # Set up", "fields.get('TITLE', u''), }) matches.append(match.docid) # Fetch and display the spy", "%(term)s; count: %(count)i\" % { 'term' : facet.term.decode('utf-8'), 'count' :", "support def search(dbpath, querystring, offset=0, pagesize=10): # offset - defines", "the database we're going to search. db = xapian.Database(dbpath) #", "python import json import sys import xapian import support def", "% { 'rank': match.rank + 1, 'docid': match.docid, 'title': fields.get('TITLE',", "Start of example code. 
# Set up a spy to", "pagesize=10): # offset - defines starting point within result set", "100): fields = json.loads(match.document.get_data().decode('utf8')) print(u\"%(rank)i: #%(docid)3.3i %(title)s\" % { 'rank':", "< 3: print(\"Usage: %s DBPATH QUERYTERM...\" % sys.argv[0]) sys.exit(1) search(dbpath", "# Set up a QueryParser with a stemmer and suitable", "#%(docid)3.3i %(title)s\" % { 'rank': match.rank + 1, 'docid': match.docid,", "%(count)i\" % { 'term' : facet.term.decode('utf-8'), 'count' : facet.termfreq })", "number of records to retrieve # Open the database we're", "= xapian.ValueCountMatchSpy(1) enquire.add_matchspy(spy) for match in enquire.get_mset(offset, pagesize, 100): fields", "= xapian.Enquire(db) enquire.set_query(query) # And print out something about each", "offset - defines starting point within result set # pagesize", "And print out something about each match matches = []", "make sure we log the query and displayed results support.log_matches(querystring,", "MAKER value at slot 1 spy = xapian.ValueCountMatchSpy(1) enquire.add_matchspy(spy) for", "to inspect the MAKER value at slot 1 spy =", "defines number of records to retrieve # Open the database", "match in enquire.get_mset(offset, pagesize, 100): fields = json.loads(match.document.get_data().decode('utf8')) print(u\"%(rank)i: #%(docid)3.3i", "print(\"Facet: %(term)s; count: %(count)i\" % { 'term' : facet.term.decode('utf-8'), 'count'", "offset, pagesize, matches) ### End of example code. if len(sys.argv)", "u''), }) matches.append(match.docid) # Fetch and display the spy values", "run the query enquire = xapian.Enquire(db) enquire.set_query(query) # And print", "code. 
if len(sys.argv) < 3: print(\"Usage: %s DBPATH QUERYTERM...\" %", "'count' : facet.termfreq }) # Finally, make sure we log", "# Set up a spy to inspect the MAKER value", "the query and displayed results support.log_matches(querystring, offset, pagesize, matches) ###", "1 spy = xapian.ValueCountMatchSpy(1) enquire.add_matchspy(spy) for match in enquire.get_mset(offset, pagesize,", "Use an Enquire object on the database to run the", "results support.log_matches(querystring, offset, pagesize, matches) ### End of example code.", "records to retrieve # Open the database we're going to", "json import sys import xapian import support def search(dbpath, querystring,", "a stemmer and suitable prefixes queryparser = xapian.QueryParser() queryparser.set_stemmer(xapian.Stem(\"en\")) queryparser.set_stemming_strategy(queryparser.STEM_SOME)", "db = xapian.Database(dbpath) # Set up a QueryParser with a", "slot 1 spy = xapian.ValueCountMatchSpy(1) enquire.add_matchspy(spy) for match in enquire.get_mset(offset,", "match matches = [] ### Start of example code. #", "'title': fields.get('TITLE', u''), }) matches.append(match.docid) # Fetch and display the", "queryparser.set_stemmer(xapian.Stem(\"en\")) queryparser.set_stemming_strategy(queryparser.STEM_SOME) queryparser.add_prefix(\"title\", \"S\") queryparser.add_prefix(\"description\", \"XD\") # And parse the", "query = queryparser.parse_query(querystring) # Use an Enquire object on the", "inspect the MAKER value at slot 1 spy = xapian.ValueCountMatchSpy(1)", "### End of example code. 
if len(sys.argv) < 3: print(\"Usage:", "Set up a QueryParser with a stemmer and suitable prefixes", "and suitable prefixes queryparser = xapian.QueryParser() queryparser.set_stemmer(xapian.Stem(\"en\")) queryparser.set_stemming_strategy(queryparser.STEM_SOME) queryparser.add_prefix(\"title\", \"S\")", "\"XD\") # And parse the query query = queryparser.parse_query(querystring) #", "the query enquire = xapian.Enquire(db) enquire.set_query(query) # And print out", "query and displayed results support.log_matches(querystring, offset, pagesize, matches) ### End", "sure we log the query and displayed results support.log_matches(querystring, offset,", "pagesize, matches) ### End of example code. if len(sys.argv) <", "len(sys.argv) < 3: print(\"Usage: %s DBPATH QUERYTERM...\" % sys.argv[0]) sys.exit(1)", "def search(dbpath, querystring, offset=0, pagesize=10): # offset - defines starting", "support.log_matches(querystring, offset, pagesize, matches) ### End of example code. if", "# Open the database we're going to search. 
db =", "defines starting point within result set # pagesize - defines", "import support def search(dbpath, querystring, offset=0, pagesize=10): # offset -", "displayed results support.log_matches(querystring, offset, pagesize, matches) ### End of example", "DBPATH QUERYTERM...\" % sys.argv[0]) sys.exit(1) search(dbpath = sys.argv[1], querystring =", "pagesize, 100): fields = json.loads(match.document.get_data().decode('utf8')) print(u\"%(rank)i: #%(docid)3.3i %(title)s\" % {", "# Finally, make sure we log the query and displayed", "<gh_stars>10-100 #!/usr/bin/env python import json import sys import xapian import", "enquire.add_matchspy(spy) for match in enquire.get_mset(offset, pagesize, 100): fields = json.loads(match.document.get_data().decode('utf8'))", "% sys.argv[0]) sys.exit(1) search(dbpath = sys.argv[1], querystring = \" \".join(sys.argv[2:]))", "print out something about each match matches = [] ###", "a spy to inspect the MAKER value at slot 1", "xapian.Enquire(db) enquire.set_query(query) # And print out something about each match", "query enquire = xapian.Enquire(db) enquire.set_query(query) # And print out something", "database we're going to search. db = xapian.Database(dbpath) # Set", "xapian import support def search(dbpath, querystring, offset=0, pagesize=10): # offset", "enquire.set_query(query) # And print out something about each match matches", "example code. if len(sys.argv) < 3: print(\"Usage: %s DBPATH QUERYTERM...\"", "for facet in spy.values(): print(\"Facet: %(term)s; count: %(count)i\" % {", "And parse the query query = queryparser.parse_query(querystring) # Use an", "in enquire.get_mset(offset, pagesize, 100): fields = json.loads(match.document.get_data().decode('utf8')) print(u\"%(rank)i: #%(docid)3.3i %(title)s\"", "# Fetch and display the spy values for facet in", "% { 'term' : facet.term.decode('utf-8'), 'count' : facet.termfreq }) #", "End of example code. 
if len(sys.argv) < 3: print(\"Usage: %s", "- defines number of records to retrieve # Open the", "match.docid, 'title': fields.get('TITLE', u''), }) matches.append(match.docid) # Fetch and display", "spy.values(): print(\"Facet: %(term)s; count: %(count)i\" % { 'term' : facet.term.decode('utf-8'),", "and displayed results support.log_matches(querystring, offset, pagesize, matches) ### End of", "Set up a spy to inspect the MAKER value at", "and display the spy values for facet in spy.values(): print(\"Facet:", "starting point within result set # pagesize - defines number", "Enquire object on the database to run the query enquire", "point within result set # pagesize - defines number of", "= xapian.QueryParser() queryparser.set_stemmer(xapian.Stem(\"en\")) queryparser.set_stemming_strategy(queryparser.STEM_SOME) queryparser.add_prefix(\"title\", \"S\") queryparser.add_prefix(\"description\", \"XD\") # And", "an Enquire object on the database to run the query", ": facet.term.decode('utf-8'), 'count' : facet.termfreq }) # Finally, make sure", "set # pagesize - defines number of records to retrieve", "a QueryParser with a stemmer and suitable prefixes queryparser =", "{ 'rank': match.rank + 1, 'docid': match.docid, 'title': fields.get('TITLE', u''),", "facet in spy.values(): print(\"Facet: %(term)s; count: %(count)i\" % { 'term'", "we log the query and displayed results support.log_matches(querystring, offset, pagesize,", "print(\"Usage: %s DBPATH QUERYTERM...\" % sys.argv[0]) sys.exit(1) search(dbpath = sys.argv[1],", "to retrieve # Open the database we're going to search.", "about each match matches = [] ### Start of example", "{ 'term' : facet.term.decode('utf-8'), 'count' : facet.termfreq }) # Finally,", "result set # pagesize - defines number of records to", "spy to inspect the MAKER value at slot 1 spy", "matches = [] ### Start of example code. # Set", "xapian.Database(dbpath) # Set up a QueryParser with a stemmer and", "to search. 
db = xapian.Database(dbpath) # Set up a QueryParser", "something about each match matches = [] ### Start of", "going to search. db = xapian.Database(dbpath) # Set up a", "import json import sys import xapian import support def search(dbpath,", "xapian.QueryParser() queryparser.set_stemmer(xapian.Stem(\"en\")) queryparser.set_stemming_strategy(queryparser.STEM_SOME) queryparser.add_prefix(\"title\", \"S\") queryparser.add_prefix(\"description\", \"XD\") # And parse", "enquire = xapian.Enquire(db) enquire.set_query(query) # And print out something about" ]
[ "< 1.0: raise serializers.ValidationError('Width should be greater than 1.0') return", "Collection, CollectionIcon class CollectionSerializer(serializers.ModelSerializer): \"\"\"Collections's serializer\"\"\" class Meta: model =", "attrs, source): name = attrs[source].lower() name = re.sub(r'[^a-z0-9\\-]', '-', name).strip('-')", "serializers from .models import Collection, CollectionIcon class CollectionSerializer(serializers.ModelSerializer): \"\"\"Collections's serializer\"\"\"", "raise serializers.ValidationError( 'Either a packicon or the shape of icon", "if name: attrs[source] = name else: raise serializers.ValidationError('Invalid name') return", "serializers.ValidationError('Width should be greater than 1.0') return attrs def validate_name(self,", "CollectionIconSerializer(serializers.ModelSerializer): \"\"\"CollectionIcon's Serializer. \"\"\" class Meta: model = CollectionIcon def", "serializers.ValidationError( 'Either a packicon or the shape of icon should", "= attrs[source].lower() name = re.sub(r'[^a-z0-9\\-]', '-', name).strip('-') name = re.sub(r'-+',", "attrs.get('svg_d') width = attrs.get('width') if packicon or (svg_d and width):", "width < 1.0: raise serializers.ValidationError('Width should be greater than 1.0')", "class CollectionIconSerializer(serializers.ModelSerializer): \"\"\"CollectionIcon's Serializer. 
\"\"\" class Meta: model = CollectionIcon", "name).strip('-') name = re.sub(r'-+', '-', name) if name: attrs[source] =", "attrs[source] if width < 1.0: raise serializers.ValidationError('Width should be greater", "packicon or the shape of icon should be given' )", "name = re.sub(r'[^a-z0-9\\-]', '-', name).strip('-') name = re.sub(r'-+', '-', name)", "name') return attrs def validate(self, attrs): packicon = attrs.get('packicon') svg_d", "name = attrs[source].lower() name = re.sub(r'[^a-z0-9\\-]', '-', name).strip('-') name =", "(svg_d and width): return attrs raise serializers.ValidationError( 'Either a packicon", "source): width = attrs[source] if width < 1.0: raise serializers.ValidationError('Width", "else: raise serializers.ValidationError('Invalid name') return attrs def validate(self, attrs): packicon", "= ('token', ) class CollectionIconSerializer(serializers.ModelSerializer): \"\"\"CollectionIcon's Serializer. \"\"\" class Meta:", "re.sub(r'[^a-z0-9\\-]', '-', name).strip('-') name = re.sub(r'-+', '-', name) if name:", "if width < 1.0: raise serializers.ValidationError('Width should be greater than", "from rest_framework import serializers from .models import Collection, CollectionIcon class", "Serializer. 
\"\"\" class Meta: model = CollectionIcon def validate_width(self, attrs,", "or (svg_d and width): return attrs raise serializers.ValidationError( 'Either a", "= attrs.get('width') if packicon or (svg_d and width): return attrs", "width = attrs.get('width') if packicon or (svg_d and width): return", "and width): return attrs raise serializers.ValidationError( 'Either a packicon or", "attrs def validate(self, attrs): packicon = attrs.get('packicon') svg_d = attrs.get('svg_d')", "= attrs.get('svg_d') width = attrs.get('width') if packicon or (svg_d and", "return attrs raise serializers.ValidationError( 'Either a packicon or the shape", "= Collection read_only = ('token', ) class CollectionIconSerializer(serializers.ModelSerializer): \"\"\"CollectionIcon's Serializer.", "'Either a packicon or the shape of icon should be", "attrs.get('width') if packicon or (svg_d and width): return attrs raise", "validate_width(self, attrs, source): width = attrs[source] if width < 1.0:", "than 1.0') return attrs def validate_name(self, attrs, source): name =", "serializer\"\"\" class Meta: model = Collection read_only = ('token', )", "raise serializers.ValidationError('Invalid name') return attrs def validate(self, attrs): packicon =", "= attrs.get('packicon') svg_d = attrs.get('svg_d') width = attrs.get('width') if packicon", "CollectionIcon def validate_width(self, attrs, source): width = attrs[source] if width", "= attrs[source] if width < 1.0: raise serializers.ValidationError('Width should be", "validate(self, attrs): packicon = attrs.get('packicon') svg_d = attrs.get('svg_d') width =", "= re.sub(r'-+', '-', name) if name: attrs[source] = name else:", "attrs def validate_name(self, attrs, source): name = attrs[source].lower() name =", "'-', name) if name: attrs[source] = name else: raise serializers.ValidationError('Invalid", "import Collection, CollectionIcon class CollectionSerializer(serializers.ModelSerializer): \"\"\"Collections's serializer\"\"\" class Meta: model", "def 
validate_name(self, attrs, source): name = attrs[source].lower() name = re.sub(r'[^a-z0-9\\-]',", "\"\"\"CollectionIcon's Serializer. \"\"\" class Meta: model = CollectionIcon def validate_width(self,", "svg_d = attrs.get('svg_d') width = attrs.get('width') if packicon or (svg_d", "raise serializers.ValidationError('Width should be greater than 1.0') return attrs def", "model = Collection read_only = ('token', ) class CollectionIconSerializer(serializers.ModelSerializer): \"\"\"CollectionIcon's", "return attrs def validate_name(self, attrs, source): name = attrs[source].lower() name", "1.0: raise serializers.ValidationError('Width should be greater than 1.0') return attrs", "validate_name(self, attrs, source): name = attrs[source].lower() name = re.sub(r'[^a-z0-9\\-]', '-',", "CollectionSerializer(serializers.ModelSerializer): \"\"\"Collections's serializer\"\"\" class Meta: model = Collection read_only =", "name: attrs[source] = name else: raise serializers.ValidationError('Invalid name') return attrs", "should be greater than 1.0') return attrs def validate_name(self, attrs,", "name else: raise serializers.ValidationError('Invalid name') return attrs def validate(self, attrs):", "re from rest_framework import serializers from .models import Collection, CollectionIcon", "import serializers from .models import Collection, CollectionIcon class CollectionSerializer(serializers.ModelSerializer): \"\"\"Collections's", "greater than 1.0') return attrs def validate_name(self, attrs, source): name", "1.0') return attrs def validate_name(self, attrs, source): name = attrs[source].lower()", "a packicon or the shape of icon should be given'", "packicon = attrs.get('packicon') svg_d = attrs.get('svg_d') width = attrs.get('width') if", "Meta: model = Collection read_only = ('token', ) class CollectionIconSerializer(serializers.ModelSerializer):", "= CollectionIcon def validate_width(self, attrs, source): width = attrs[source] if", "attrs[source] = name else: raise 
serializers.ValidationError('Invalid name') return attrs def", "attrs raise serializers.ValidationError( 'Either a packicon or the shape of", "attrs[source].lower() name = re.sub(r'[^a-z0-9\\-]', '-', name).strip('-') name = re.sub(r'-+', '-',", "attrs.get('packicon') svg_d = attrs.get('svg_d') width = attrs.get('width') if packicon or", "rest_framework import serializers from .models import Collection, CollectionIcon class CollectionSerializer(serializers.ModelSerializer):", "import re from rest_framework import serializers from .models import Collection,", "packicon or (svg_d and width): return attrs raise serializers.ValidationError( 'Either", "\"\"\" class Meta: model = CollectionIcon def validate_width(self, attrs, source):", "read_only = ('token', ) class CollectionIconSerializer(serializers.ModelSerializer): \"\"\"CollectionIcon's Serializer. \"\"\" class", "def validate_width(self, attrs, source): width = attrs[source] if width <", "('token', ) class CollectionIconSerializer(serializers.ModelSerializer): \"\"\"CollectionIcon's Serializer. 
\"\"\" class Meta: model", "CollectionIcon class CollectionSerializer(serializers.ModelSerializer): \"\"\"Collections's serializer\"\"\" class Meta: model = Collection", "re.sub(r'-+', '-', name) if name: attrs[source] = name else: raise", "name = re.sub(r'-+', '-', name) if name: attrs[source] = name", "'-', name).strip('-') name = re.sub(r'-+', '-', name) if name: attrs[source]", "attrs): packicon = attrs.get('packicon') svg_d = attrs.get('svg_d') width = attrs.get('width')", "be greater than 1.0') return attrs def validate_name(self, attrs, source):", "\"\"\"Collections's serializer\"\"\" class Meta: model = Collection read_only = ('token',", "source): name = attrs[source].lower() name = re.sub(r'[^a-z0-9\\-]', '-', name).strip('-') name", "name) if name: attrs[source] = name else: raise serializers.ValidationError('Invalid name')", "from .models import Collection, CollectionIcon class CollectionSerializer(serializers.ModelSerializer): \"\"\"Collections's serializer\"\"\" class", "= re.sub(r'[^a-z0-9\\-]', '-', name).strip('-') name = re.sub(r'-+', '-', name) if", "model = CollectionIcon def validate_width(self, attrs, source): width = attrs[source]", "class Meta: model = Collection read_only = ('token', ) class", "attrs, source): width = attrs[source] if width < 1.0: raise", "width = attrs[source] if width < 1.0: raise serializers.ValidationError('Width should", ") class CollectionIconSerializer(serializers.ModelSerializer): \"\"\"CollectionIcon's Serializer. 
\"\"\" class Meta: model =", "class CollectionSerializer(serializers.ModelSerializer): \"\"\"Collections's serializer\"\"\" class Meta: model = Collection read_only", "= name else: raise serializers.ValidationError('Invalid name') return attrs def validate(self,", "return attrs def validate(self, attrs): packicon = attrs.get('packicon') svg_d =", "Meta: model = CollectionIcon def validate_width(self, attrs, source): width =", "Collection read_only = ('token', ) class CollectionIconSerializer(serializers.ModelSerializer): \"\"\"CollectionIcon's Serializer. \"\"\"", "if packicon or (svg_d and width): return attrs raise serializers.ValidationError(", ".models import Collection, CollectionIcon class CollectionSerializer(serializers.ModelSerializer): \"\"\"Collections's serializer\"\"\" class Meta:", "serializers.ValidationError('Invalid name') return attrs def validate(self, attrs): packicon = attrs.get('packicon')", "def validate(self, attrs): packicon = attrs.get('packicon') svg_d = attrs.get('svg_d') width", "class Meta: model = CollectionIcon def validate_width(self, attrs, source): width", "width): return attrs raise serializers.ValidationError( 'Either a packicon or the" ]
[ "if lookup[used] is None: targ = (todo-1)%target + 1 lookup[used]", "nums, k): \"\"\" :type nums: List[int] :type k: int :rtype:", "= sum(nums) if total%k != 0 or max(nums) > total//k:", ":type k: int :rtype: bool \"\"\" def dfs(nums, target, used,", "used, todo, lookup): if lookup[used] is None: targ = (todo-1)%target", "True return dfs(nums, total//k, 0, total, lookup) # Time: O(k^(n-k)", "lookup) # Time: O(k^(n-k) * k!) # Space: O(n) #", "# Time: O(k^(n-k) * k!) # Space: O(n) # DFS", "subset_sums[k]+nums[i] > target: continue subset_sums[k] += nums[i] if dfs(nums, target,", "return lookup[used] total = sum(nums) if total%k or max(nums) >", "bool \"\"\" def dfs(nums, target, used, todo, lookup): if lookup[used]", "* k!) # Space: O(n) # DFS solution with pruning.", "bool \"\"\" def dfs(nums, target, i, subset_sums): if i ==", "i == len(nums): return True for k in range(len(subset_sums)): if", "sum(nums) if total%k != 0 or max(nums) > total//k: return", "& 1) == 0 and num <= targ) return lookup[used]", "target, i+1, subset_sums): return True subset_sums[k] -= nums[i] if not", "int :rtype: bool \"\"\" def dfs(nums, target, used, todo, lookup):", "class Solution(object): def canPartitionKSubsets(self, nums, k): \"\"\" :type nums: List[int]", "i, subset_sums): if i == len(nums): return True for k", "num in enumerate(nums) \\ if ((used>>i) & 1) == 0", "in enumerate(nums) \\ if ((used>>i) & 1) == 0 and", "return True subset_sums[k] -= nums[i] if not subset_sums[k]: break return", "> target: continue subset_sums[k] += nums[i] if dfs(nums, target, i+1,", "if subset_sums[k]+nums[i] > target: continue subset_sums[k] += nums[i] if dfs(nums,", "nums.sort(reverse=True) subset_sums = [0] * k return dfs(nums, total//k, 0,", "# DFS solution with pruning. 
class Solution2(object): def canPartitionKSubsets(self, nums,", "todo-num, lookup) \\ for i, num in enumerate(nums) \\ if", "(todo-1)%target + 1 lookup[used] = any(dfs(nums, target, used | (1<<i),", "total%k != 0 or max(nums) > total//k: return False nums.sort(reverse=True)", "and num <= targ) return lookup[used] total = sum(nums) if", "total//k: return False lookup = [None] * (1 << len(nums))", "break return False total = sum(nums) if total%k != 0", "subset_sums[k] += nums[i] if dfs(nums, target, i+1, subset_sums): return True", "return False total = sum(nums) if total%k != 0 or", "nums[i] if not subset_sums[k]: break return False total = sum(nums)", "k: int :rtype: bool \"\"\" def dfs(nums, target, used, todo,", "= sum(nums) if total%k or max(nums) > total//k: return False", "solution with pruning. class Solution2(object): def canPartitionKSubsets(self, nums, k): \"\"\"", "# Space: O(n) # DFS solution with pruning. class Solution2(object):", "k): \"\"\" :type nums: List[int] :type k: int :rtype: bool", "O(n) # DFS solution with pruning. class Solution2(object): def canPartitionKSubsets(self,", "def canPartitionKSubsets(self, nums, k): \"\"\" :type nums: List[int] :type k:", "= (todo-1)%target + 1 lookup[used] = any(dfs(nums, target, used |", "\"\"\" def dfs(nums, target, used, todo, lookup): if lookup[used] is", "# Time: O(n*2^n) # Space: O(2^n) class Solution(object): def canPartitionKSubsets(self,", "Time: O(n*2^n) # Space: O(2^n) class Solution(object): def canPartitionKSubsets(self, nums,", "> total//k: return False nums.sort(reverse=True) subset_sums = [0] * k", "k!) # Space: O(n) # DFS solution with pruning. class", "pruning. 
class Solution2(object): def canPartitionKSubsets(self, nums, k): \"\"\" :type nums:", "+= nums[i] if dfs(nums, target, i+1, subset_sums): return True subset_sums[k]", "(1 << len(nums)) lookup[-1] = True return dfs(nums, total//k, 0,", "return dfs(nums, total//k, 0, total, lookup) # Time: O(k^(n-k) *", "if total%k != 0 or max(nums) > total//k: return False", "len(nums)) lookup[-1] = True return dfs(nums, total//k, 0, total, lookup)", "== 0 and num <= targ) return lookup[used] total =", "dfs(nums, total//k, 0, total, lookup) # Time: O(k^(n-k) * k!)", "or max(nums) > total//k: return False nums.sort(reverse=True) subset_sums = [0]", "len(nums): return True for k in range(len(subset_sums)): if subset_sums[k]+nums[i] >", "return False lookup = [None] * (1 << len(nums)) lookup[-1]", "True subset_sums[k] -= nums[i] if not subset_sums[k]: break return False", "subset_sums[k]: break return False total = sum(nums) if total%k !=", "dfs(nums, target, i+1, subset_sums): return True subset_sums[k] -= nums[i] if", "False nums.sort(reverse=True) subset_sums = [0] * k return dfs(nums, total//k,", "targ = (todo-1)%target + 1 lookup[used] = any(dfs(nums, target, used", "lookup[used] total = sum(nums) if total%k or max(nums) > total//k:", "target: continue subset_sums[k] += nums[i] if dfs(nums, target, i+1, subset_sums):", "total%k or max(nums) > total//k: return False lookup = [None]", "subset_sums = [0] * k return dfs(nums, total//k, 0, subset_sums)", "not subset_sums[k]: break return False total = sum(nums) if total%k", "total//k: return False nums.sort(reverse=True) subset_sums = [0] * k return", "((used>>i) & 1) == 0 and num <= targ) return", ":rtype: bool \"\"\" def dfs(nums, target, i, subset_sums): if i", "= True return dfs(nums, total//k, 0, total, lookup) # Time:", "False total = sum(nums) if total%k != 0 or max(nums)", "O(n*2^n) # Space: O(2^n) class Solution(object): def canPartitionKSubsets(self, nums, k):", "lookup): if lookup[used] is None: targ = (todo-1)%target + 1", 
"O(k^(n-k) * k!) # Space: O(n) # DFS solution with", "\"\"\" :type nums: List[int] :type k: int :rtype: bool \"\"\"", "<= targ) return lookup[used] total = sum(nums) if total%k or", "\\ if ((used>>i) & 1) == 0 and num <=", "total = sum(nums) if total%k or max(nums) > total//k: return", "for i, num in enumerate(nums) \\ if ((used>>i) & 1)", "total = sum(nums) if total%k != 0 or max(nums) >", "Solution(object): def canPartitionKSubsets(self, nums, k): \"\"\" :type nums: List[int] :type", "subset_sums): if i == len(nums): return True for k in", "!= 0 or max(nums) > total//k: return False nums.sort(reverse=True) subset_sums", "if total%k or max(nums) > total//k: return False lookup =", "\\ for i, num in enumerate(nums) \\ if ((used>>i) &", "nums[i] if dfs(nums, target, i+1, subset_sums): return True subset_sums[k] -=", "lookup) \\ for i, num in enumerate(nums) \\ if ((used>>i)", "canPartitionKSubsets(self, nums, k): \"\"\" :type nums: List[int] :type k: int", "= [None] * (1 << len(nums)) lookup[-1] = True return", "total//k, 0, total, lookup) # Time: O(k^(n-k) * k!) 
#", "O(2^n) class Solution(object): def canPartitionKSubsets(self, nums, k): \"\"\" :type nums:", ":type k: int :rtype: bool \"\"\" def dfs(nums, target, i,", "\"\"\" def dfs(nums, target, i, subset_sums): if i == len(nums):", ":rtype: bool \"\"\" def dfs(nums, target, used, todo, lookup): if", "target, used, todo, lookup): if lookup[used] is None: targ =", "int :rtype: bool \"\"\" def dfs(nums, target, i, subset_sums): if", "Space: O(2^n) class Solution(object): def canPartitionKSubsets(self, nums, k): \"\"\" :type", "-= nums[i] if not subset_sums[k]: break return False total =", "k: int :rtype: bool \"\"\" def dfs(nums, target, i, subset_sums):", "dfs(nums, target, i, subset_sums): if i == len(nums): return True", "[None] * (1 << len(nums)) lookup[-1] = True return dfs(nums,", "= any(dfs(nums, target, used | (1<<i), todo-num, lookup) \\ for", ":type nums: List[int] :type k: int :rtype: bool \"\"\" def", "def dfs(nums, target, used, todo, lookup): if lookup[used] is None:", "if ((used>>i) & 1) == 0 and num <= targ)", "if not subset_sums[k]: break return False total = sum(nums) if", "False lookup = [None] * (1 << len(nums)) lookup[-1] =", "Solution2(object): def canPartitionKSubsets(self, nums, k): \"\"\" :type nums: List[int] :type", "sum(nums) if total%k or max(nums) > total//k: return False lookup", "> total//k: return False lookup = [None] * (1 <<", "def dfs(nums, target, i, subset_sums): if i == len(nums): return", "todo, lookup): if lookup[used] is None: targ = (todo-1)%target +", "with pruning. 
class Solution2(object): def canPartitionKSubsets(self, nums, k): \"\"\" :type", "target, i, subset_sums): if i == len(nums): return True for", "return True for k in range(len(subset_sums)): if subset_sums[k]+nums[i] > target:", "class Solution2(object): def canPartitionKSubsets(self, nums, k): \"\"\" :type nums: List[int]", "subset_sums): return True subset_sums[k] -= nums[i] if not subset_sums[k]: break", "None: targ = (todo-1)%target + 1 lookup[used] = any(dfs(nums, target,", "target, used | (1<<i), todo-num, lookup) \\ for i, num", "is None: targ = (todo-1)%target + 1 lookup[used] = any(dfs(nums,", "0 or max(nums) > total//k: return False nums.sort(reverse=True) subset_sums =", "targ) return lookup[used] total = sum(nums) if total%k or max(nums)", "(1<<i), todo-num, lookup) \\ for i, num in enumerate(nums) \\", "0 and num <= targ) return lookup[used] total = sum(nums)", "subset_sums[k] -= nums[i] if not subset_sums[k]: break return False total", "# Space: O(2^n) class Solution(object): def canPartitionKSubsets(self, nums, k): \"\"\"", "i+1, subset_sums): return True subset_sums[k] -= nums[i] if not subset_sums[k]:", "0, total, lookup) # Time: O(k^(n-k) * k!) # Space:", "max(nums) > total//k: return False lookup = [None] * (1", "k in range(len(subset_sums)): if subset_sums[k]+nums[i] > target: continue subset_sums[k] +=", "any(dfs(nums, target, used | (1<<i), todo-num, lookup) \\ for i,", "dfs(nums, target, used, todo, lookup): if lookup[used] is None: targ", "or max(nums) > total//k: return False lookup = [None] *", "nums: List[int] :type k: int :rtype: bool \"\"\" def dfs(nums,", "for k in range(len(subset_sums)): if subset_sums[k]+nums[i] > target: continue subset_sums[k]", "Space: O(n) # DFS solution with pruning. 
class Solution2(object): def", "if i == len(nums): return True for k in range(len(subset_sums)):", "return False nums.sort(reverse=True) subset_sums = [0] * k return dfs(nums,", "enumerate(nums) \\ if ((used>>i) & 1) == 0 and num", "* (1 << len(nums)) lookup[-1] = True return dfs(nums, total//k,", "True for k in range(len(subset_sums)): if subset_sums[k]+nums[i] > target: continue", "in range(len(subset_sums)): if subset_sums[k]+nums[i] > target: continue subset_sums[k] += nums[i]", "if dfs(nums, target, i+1, subset_sums): return True subset_sums[k] -= nums[i]", "List[int] :type k: int :rtype: bool \"\"\" def dfs(nums, target,", "num <= targ) return lookup[used] total = sum(nums) if total%k", "total, lookup) # Time: O(k^(n-k) * k!) # Space: O(n)", "continue subset_sums[k] += nums[i] if dfs(nums, target, i+1, subset_sums): return", "range(len(subset_sums)): if subset_sums[k]+nums[i] > target: continue subset_sums[k] += nums[i] if", "1 lookup[used] = any(dfs(nums, target, used | (1<<i), todo-num, lookup)", "+ 1 lookup[used] = any(dfs(nums, target, used | (1<<i), todo-num,", "max(nums) > total//k: return False nums.sort(reverse=True) subset_sums = [0] *", "used | (1<<i), todo-num, lookup) \\ for i, num in", "1) == 0 and num <= targ) return lookup[used] total", "i, num in enumerate(nums) \\ if ((used>>i) & 1) ==", "<< len(nums)) lookup[-1] = True return dfs(nums, total//k, 0, total,", "lookup[used] is None: targ = (todo-1)%target + 1 lookup[used] =", "lookup[-1] = True return dfs(nums, total//k, 0, total, lookup) #", "DFS solution with pruning. class Solution2(object): def canPartitionKSubsets(self, nums, k):", "== len(nums): return True for k in range(len(subset_sums)): if subset_sums[k]+nums[i]", "Time: O(k^(n-k) * k!) # Space: O(n) # DFS solution", "lookup[used] = any(dfs(nums, target, used | (1<<i), todo-num, lookup) \\", "| (1<<i), todo-num, lookup) \\ for i, num in enumerate(nums)", "lookup = [None] * (1 << len(nums)) lookup[-1] = True" ]
[ "checker designed to eventually replace Ispell.\"\"\" homepage = \"http://aspell.net/\" gnu_mirror_path", "from spack.package import * # See also: AspellDictPackage class Aspell(AutotoolsPackage,", "extendable = True # support activating dictionaries version('0.60.6.1', sha256='f52583a83a63633701c5f71db3dc40aab87b7f76b29723aeb27941eff42df6e1') patch('fix_cpp.patch')", "# See also: AspellDictPackage class Aspell(AutotoolsPackage, GNUMirrorPackage): \"\"\"GNU Aspell is", "also: AspellDictPackage class Aspell(AutotoolsPackage, GNUMirrorPackage): \"\"\"GNU Aspell is a Free", "MIT) from spack.package import * # See also: AspellDictPackage class", "Copyright 2013-2022 Lawrence Livermore National Security, LLC and other #", "\"http://aspell.net/\" gnu_mirror_path = \"aspell/aspell-0.60.6.1.tar.gz\" extendable = True # support activating", "homepage = \"http://aspell.net/\" gnu_mirror_path = \"aspell/aspell-0.60.6.1.tar.gz\" extendable = True #", "Spack Project Developers. See the top-level COPYRIGHT file for details.", "AspellDictPackage class Aspell(AutotoolsPackage, GNUMirrorPackage): \"\"\"GNU Aspell is a Free and", "2013-2022 Lawrence Livermore National Security, LLC and other # Spack", "gnu_mirror_path = \"aspell/aspell-0.60.6.1.tar.gz\" extendable = True # support activating dictionaries", "\"aspell/aspell-0.60.6.1.tar.gz\" extendable = True # support activating dictionaries version('0.60.6.1', sha256='f52583a83a63633701c5f71db3dc40aab87b7f76b29723aeb27941eff42df6e1')", "National Security, LLC and other # Spack Project Developers. See", "# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import *", "See also: AspellDictPackage class Aspell(AutotoolsPackage, GNUMirrorPackage): \"\"\"GNU Aspell is a", "OR MIT) from spack.package import * # See also: AspellDictPackage", "GNUMirrorPackage): \"\"\"GNU Aspell is a Free and Open Source spell", "LLC and other # Spack Project Developers. 
See the top-level", "a Free and Open Source spell checker designed to eventually", "designed to eventually replace Ispell.\"\"\" homepage = \"http://aspell.net/\" gnu_mirror_path =", "Aspell is a Free and Open Source spell checker designed", "for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package", "Security, LLC and other # Spack Project Developers. See the", "= \"aspell/aspell-0.60.6.1.tar.gz\" extendable = True # support activating dictionaries version('0.60.6.1',", "import * # See also: AspellDictPackage class Aspell(AutotoolsPackage, GNUMirrorPackage): \"\"\"GNU", "eventually replace Ispell.\"\"\" homepage = \"http://aspell.net/\" gnu_mirror_path = \"aspell/aspell-0.60.6.1.tar.gz\" extendable", "= \"http://aspell.net/\" gnu_mirror_path = \"aspell/aspell-0.60.6.1.tar.gz\" extendable = True # support", "to eventually replace Ispell.\"\"\" homepage = \"http://aspell.net/\" gnu_mirror_path = \"aspell/aspell-0.60.6.1.tar.gz\"", "(Apache-2.0 OR MIT) from spack.package import * # See also:", "True # support activating dictionaries version('0.60.6.1', sha256='f52583a83a63633701c5f71db3dc40aab87b7f76b29723aeb27941eff42df6e1') patch('fix_cpp.patch') patch('issue-519.patch', when='@:0.60.6.1')", "Aspell(AutotoolsPackage, GNUMirrorPackage): \"\"\"GNU Aspell is a Free and Open Source", "* # See also: AspellDictPackage class Aspell(AutotoolsPackage, GNUMirrorPackage): \"\"\"GNU Aspell", "= True # support activating dictionaries version('0.60.6.1', sha256='f52583a83a63633701c5f71db3dc40aab87b7f76b29723aeb27941eff42df6e1') patch('fix_cpp.patch') patch('issue-519.patch',", "See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier:", "is a Free and Open Source spell checker designed to", "spell checker designed to eventually replace Ispell.\"\"\" homepage = \"http://aspell.net/\"", "and other # Spack Project Developers. 
See the top-level COPYRIGHT", "spack.package import * # See also: AspellDictPackage class Aspell(AutotoolsPackage, GNUMirrorPackage):", "class Aspell(AutotoolsPackage, GNUMirrorPackage): \"\"\"GNU Aspell is a Free and Open", "Free and Open Source spell checker designed to eventually replace", "and Open Source spell checker designed to eventually replace Ispell.\"\"\"", "the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0", "replace Ispell.\"\"\" homepage = \"http://aspell.net/\" gnu_mirror_path = \"aspell/aspell-0.60.6.1.tar.gz\" extendable =", "file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from", "# Spack Project Developers. See the top-level COPYRIGHT file for", "Project Developers. See the top-level COPYRIGHT file for details. #", "Lawrence Livermore National Security, LLC and other # Spack Project", "details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import", "Source spell checker designed to eventually replace Ispell.\"\"\" homepage =", "Developers. See the top-level COPYRIGHT file for details. # #", "# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other", "SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * # See", "Livermore National Security, LLC and other # Spack Project Developers.", "other # Spack Project Developers. See the top-level COPYRIGHT file", "\"\"\"GNU Aspell is a Free and Open Source spell checker", "Open Source spell checker designed to eventually replace Ispell.\"\"\" homepage", "COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT)", "Ispell.\"\"\" homepage = \"http://aspell.net/\" gnu_mirror_path = \"aspell/aspell-0.60.6.1.tar.gz\" extendable = True", "top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR", "# SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * #" ]
[ "self.piece1 + [cExpr] res = Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect, self.aDict,[self.pDict,self.pDict]) self.assertAlmostEqual(res[0],self.results[i],2) self.assertAlmostEqual(res[1],self.results[i],2) #self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression", "= [('Fe',1),('Pt',1)] self.cExprs = [\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"] self.results = [11.,33.,5.5,9.,10.,2.,0.11] self.tol =", "rdkit.six.moves import xrange class TestCase(unittest.TestCase): def setUp(self): print('\\n%s: '%self.shortDescription(),end='') self.piece1", "= Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect, self.aDict,[self.pDict,self.pDict]) self.assertAlmostEqual(res[0],self.results[i],2) self.assertAlmostEqual(res[1],self.results[i],2) #self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) #self.assertTrue((res[1]-self.results[i])<self.tol,'Expression %s", "Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict) self.assertAlmostEqual(res,self.results[i],2) def testMultipleCalcs(self): \" testing calculation of multiple descriptors", "self.compos = [('Fe',1),('Pt',1)] self.cExprs = [\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"] self.results = [11.,33.,5.5,9.,10.,2.,0.11] self.tol", "self.piece1 + [cExpr] res = Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict) self.assertAlmostEqual(res,self.results[i],2) def testMultipleCalcs(self): \"", "#self.assertTrue((res[1]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) def TestSuite(): suite = unittest.TestSuite() suite.addTest(TestCase('testSingleCalcs')) suite.addTest(TestCase('testMultipleCalcs'))", "descriptor \" for i in xrange(len(self.cExprs)): cExpr= self.cExprs[i] argVect =", "unit testing code for compound descriptors \"\"\" from __future__ import", "= 
{'d1':100.,'d2':200.} self.compos = [('Fe',1),('Pt',1)] self.cExprs = [\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"] self.results =", "__future__ import print_function import unittest import Parser from rdkit.six.moves import", "xrange(len(self.cExprs)): cExpr= self.cExprs[i] argVect = self.piece1 + [cExpr] res =", "[11.,33.,5.5,9.,10.,2.,0.11] self.tol = 0.0001 def testSingleCalcs(self): \" testing calculation of", "testing calculation of multiple descriptors \" for i in xrange(len(self.cExprs)):", "calculation of a single descriptor \" for i in xrange(len(self.cExprs)):", "single descriptor \" for i in xrange(len(self.cExprs)): cExpr= self.cExprs[i] argVect", "TestCase(unittest.TestCase): def setUp(self): print('\\n%s: '%self.shortDescription(),end='') self.piece1 = [['d1','d2'],['d1','d2']] self.aDict =", "descriptors \"\"\" from __future__ import print_function import unittest import Parser", "[cExpr] res = Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect, self.aDict,[self.pDict,self.pDict]) self.assertAlmostEqual(res[0],self.results[i],2) self.assertAlmostEqual(res[1],self.results[i],2) #self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression %s failed'%(cExpr))", "def testSingleCalcs(self): \" testing calculation of a single descriptor \"", "{'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}} self.pDict = {'d1':100.,'d2':200.} self.compos = [('Fe',1),('Pt',1)] self.cExprs = [\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"]", "<reponame>Ivy286/cluster_basedfps<filename>third_party_package/RDKit_2015_03_1/rdkit/ML/Descriptors/UnitTestParser.py # # Copyright (C) 2001 <NAME> # \"\"\" unit", "cExpr= self.cExprs[i] argVect = self.piece1 + [cExpr] res = Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect,", "%s failed'%(cExpr)) #self.assertTrue((res[1]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) def 
TestSuite(): suite = unittest.TestSuite()", "\"\"\" unit testing code for compound descriptors \"\"\" from __future__", "{'d1':100.,'d2':200.} self.compos = [('Fe',1),('Pt',1)] self.cExprs = [\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"] self.results = [11.,33.,5.5,9.,10.,2.,0.11]", "self.tol = 0.0001 def testSingleCalcs(self): \" testing calculation of a", "2001 <NAME> # \"\"\" unit testing code for compound descriptors", "in xrange(len(self.cExprs)): cExpr= self.cExprs[i] argVect = self.piece1 + [cExpr] res", "Parser from rdkit.six.moves import xrange class TestCase(unittest.TestCase): def setUp(self): print('\\n%s:", "= [['d1','d2'],['d1','d2']] self.aDict = {'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}} self.pDict = {'d1':100.,'d2':200.} self.compos =", "= unittest.TestSuite() suite.addTest(TestCase('testSingleCalcs')) suite.addTest(TestCase('testMultipleCalcs')) return suite if __name__ == '__main__':", "def setUp(self): print('\\n%s: '%self.shortDescription(),end='') self.piece1 = [['d1','d2'],['d1','d2']] self.aDict = {'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}}", "self.cExprs = [\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"] self.results = [11.,33.,5.5,9.,10.,2.,0.11] self.tol = 0.0001 def", "xrange class TestCase(unittest.TestCase): def setUp(self): print('\\n%s: '%self.shortDescription(),end='') self.piece1 = [['d1','d2'],['d1','d2']]", "self.aDict = {'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}} self.pDict = {'d1':100.,'d2':200.} self.compos = [('Fe',1),('Pt',1)] self.cExprs", "= [\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"] self.results = [11.,33.,5.5,9.,10.,2.,0.11] self.tol = 0.0001 def testSingleCalcs(self):", "suite.addTest(TestCase('testSingleCalcs')) suite.addTest(TestCase('testMultipleCalcs')) return suite if __name__ == '__main__': suite =", "self.cExprs[i] argVect = self.piece1 + [cExpr] res = 
Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect, self.aDict,[self.pDict,self.pDict])", "i in xrange(len(self.cExprs)): cExpr= self.cExprs[i] argVect = self.piece1 + [cExpr]", "print_function import unittest import Parser from rdkit.six.moves import xrange class", "self.assertAlmostEqual(res,self.results[i],2) def testMultipleCalcs(self): \" testing calculation of multiple descriptors \"", "setUp(self): print('\\n%s: '%self.shortDescription(),end='') self.piece1 = [['d1','d2'],['d1','d2']] self.aDict = {'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}} self.pDict", "#self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) #self.assertTrue((res[1]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) def TestSuite(): suite =", "testing calculation of a single descriptor \" for i in", "Copyright (C) 2001 <NAME> # \"\"\" unit testing code for", "from rdkit.six.moves import xrange class TestCase(unittest.TestCase): def setUp(self): print('\\n%s: '%self.shortDescription(),end='')", "failed'%(cExpr)) def TestSuite(): suite = unittest.TestSuite() suite.addTest(TestCase('testSingleCalcs')) suite.addTest(TestCase('testMultipleCalcs')) return suite", "%s failed'%(cExpr)) def TestSuite(): suite = unittest.TestSuite() suite.addTest(TestCase('testSingleCalcs')) suite.addTest(TestCase('testMultipleCalcs')) return", "testSingleCalcs(self): \" testing calculation of a single descriptor \" for", "argVect = self.piece1 + [cExpr] res = Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect, self.aDict,[self.pDict,self.pDict]) self.assertAlmostEqual(res[0],self.results[i],2)", "res = Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect, self.aDict,[self.pDict,self.pDict]) self.assertAlmostEqual(res[0],self.results[i],2) self.assertAlmostEqual(res[1],self.results[i],2) #self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) 
#self.assertTrue((res[1]-self.results[i])<self.tol,'Expression", "def TestSuite(): suite = unittest.TestSuite() suite.addTest(TestCase('testSingleCalcs')) suite.addTest(TestCase('testMultipleCalcs')) return suite if", "res = Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict) self.assertAlmostEqual(res,self.results[i],2) def testMultipleCalcs(self): \" testing calculation of", "def testMultipleCalcs(self): \" testing calculation of multiple descriptors \" for", "print('\\n%s: '%self.shortDescription(),end='') self.piece1 = [['d1','d2'],['d1','d2']] self.aDict = {'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}} self.pDict =", "import unittest import Parser from rdkit.six.moves import xrange class TestCase(unittest.TestCase):", "of multiple descriptors \" for i in xrange(len(self.cExprs)): cExpr= self.cExprs[i]", "# Copyright (C) 2001 <NAME> # \"\"\" unit testing code", "= self.piece1 + [cExpr] res = Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect, self.aDict,[self.pDict,self.pDict]) self.assertAlmostEqual(res[0],self.results[i],2) self.assertAlmostEqual(res[1],self.results[i],2)", "failed'%(cExpr)) #self.assertTrue((res[1]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) def TestSuite(): suite = unittest.TestSuite() suite.addTest(TestCase('testSingleCalcs'))", "<NAME> # \"\"\" unit testing code for compound descriptors \"\"\"", "self.assertAlmostEqual(res[1],self.results[i],2) #self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) #self.assertTrue((res[1]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) def TestSuite(): suite", "import print_function import unittest import Parser from rdkit.six.moves import xrange", "argVect = self.piece1 + [cExpr] res = Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict) self.assertAlmostEqual(res,self.results[i],2) def", "\"\"\" from __future__ import print_function import unittest import Parser from", 
"self.cExprs[i] argVect = self.piece1 + [cExpr] res = Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict) self.assertAlmostEqual(res,self.results[i],2)", "= {'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}} self.pDict = {'d1':100.,'d2':200.} self.compos = [('Fe',1),('Pt',1)] self.cExprs =", "self.assertAlmostEqual(res[0],self.results[i],2) self.assertAlmostEqual(res[1],self.results[i],2) #self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) #self.assertTrue((res[1]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) def TestSuite():", "= Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict) self.assertAlmostEqual(res,self.results[i],2) def testMultipleCalcs(self): \" testing calculation of multiple", "return suite if __name__ == '__main__': suite = TestSuite() unittest.TextTestRunner().run(suite)", "for compound descriptors \"\"\" from __future__ import print_function import unittest", "+ [cExpr] res = Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict) self.assertAlmostEqual(res,self.results[i],2) def testMultipleCalcs(self): \" testing", "class TestCase(unittest.TestCase): def setUp(self): print('\\n%s: '%self.shortDescription(),end='') self.piece1 = [['d1','d2'],['d1','d2']] self.aDict", "[cExpr] res = Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict) self.assertAlmostEqual(res,self.results[i],2) def testMultipleCalcs(self): \" testing calculation", "= [11.,33.,5.5,9.,10.,2.,0.11] self.tol = 0.0001 def testSingleCalcs(self): \" testing calculation", "+ [cExpr] res = Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect, self.aDict,[self.pDict,self.pDict]) self.assertAlmostEqual(res[0],self.results[i],2) self.assertAlmostEqual(res[1],self.results[i],2) #self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression %s", "0.0001 def testSingleCalcs(self): \" testing calculation of a single descriptor", 
"= self.piece1 + [cExpr] res = Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict) self.assertAlmostEqual(res,self.results[i],2) def testMultipleCalcs(self):", "\" testing calculation of multiple descriptors \" for i in", "[('Fe',1),('Pt',1)] self.cExprs = [\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"] self.results = [11.,33.,5.5,9.,10.,2.,0.11] self.tol = 0.0001", "[\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"] self.results = [11.,33.,5.5,9.,10.,2.,0.11] self.tol = 0.0001 def testSingleCalcs(self): \"", "calculation of multiple descriptors \" for i in xrange(len(self.cExprs)): cExpr=", "a single descriptor \" for i in xrange(len(self.cExprs)): cExpr= self.cExprs[i]", "descriptors \" for i in xrange(len(self.cExprs)): cExpr= self.cExprs[i] argVect =", "import Parser from rdkit.six.moves import xrange class TestCase(unittest.TestCase): def setUp(self):", "for i in xrange(len(self.cExprs)): cExpr= self.cExprs[i] argVect = self.piece1 +", "self.pDict = {'d1':100.,'d2':200.} self.compos = [('Fe',1),('Pt',1)] self.cExprs = [\"SUM($1)\",\"SUM($1)+SUM($2)\",\"MEAN($1)\",\"DEV($2)\",\"MAX($1)\",\"MIN($2)\",\"SUM($1)/$a\"] self.results", "multiple descriptors \" for i in xrange(len(self.cExprs)): cExpr= self.cExprs[i] argVect", "= 0.0001 def testSingleCalcs(self): \" testing calculation of a single", "Parser.CalcMultipleCompoundsDescriptor([self.compos,self.compos],argVect, self.aDict,[self.pDict,self.pDict]) self.assertAlmostEqual(res[0],self.results[i],2) self.assertAlmostEqual(res[1],self.results[i],2) #self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) #self.assertTrue((res[1]-self.results[i])<self.tol,'Expression %s failed'%(cExpr))", "(C) 2001 <NAME> # \"\"\" unit testing code for compound", "self.piece1 = [['d1','d2'],['d1','d2']] self.aDict = {'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}} self.pDict = 
{'d1':100.,'d2':200.} self.compos", "suite.addTest(TestCase('testMultipleCalcs')) return suite if __name__ == '__main__': suite = TestSuite()", "compound descriptors \"\"\" from __future__ import print_function import unittest import", "code for compound descriptors \"\"\" from __future__ import print_function import", "self.results = [11.,33.,5.5,9.,10.,2.,0.11] self.tol = 0.0001 def testSingleCalcs(self): \" testing", "import xrange class TestCase(unittest.TestCase): def setUp(self): print('\\n%s: '%self.shortDescription(),end='') self.piece1 =", "\" testing calculation of a single descriptor \" for i", "TestSuite(): suite = unittest.TestSuite() suite.addTest(TestCase('testSingleCalcs')) suite.addTest(TestCase('testMultipleCalcs')) return suite if __name__", "# # Copyright (C) 2001 <NAME> # \"\"\" unit testing", "testing code for compound descriptors \"\"\" from __future__ import print_function", "of a single descriptor \" for i in xrange(len(self.cExprs)): cExpr=", "testMultipleCalcs(self): \" testing calculation of multiple descriptors \" for i", "'%self.shortDescription(),end='') self.piece1 = [['d1','d2'],['d1','d2']] self.aDict = {'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}} self.pDict = {'d1':100.,'d2':200.}", "from __future__ import print_function import unittest import Parser from rdkit.six.moves", "self.aDict,[self.pDict,self.pDict]) self.assertAlmostEqual(res[0],self.results[i],2) self.assertAlmostEqual(res[1],self.results[i],2) #self.assertTrue(abs(res[0]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) #self.assertTrue((res[1]-self.results[i])<self.tol,'Expression %s failed'%(cExpr)) def", "suite = unittest.TestSuite() suite.addTest(TestCase('testSingleCalcs')) suite.addTest(TestCase('testMultipleCalcs')) return suite if __name__ ==", "cExpr= self.cExprs[i] argVect = self.piece1 + [cExpr] res = Parser.CalcSingleCompoundDescriptor(self.compos,argVect,self.aDict,self.pDict)", "unittest import Parser from rdkit.six.moves import xrange class 
TestCase(unittest.TestCase): def", "unittest.TestSuite() suite.addTest(TestCase('testSingleCalcs')) suite.addTest(TestCase('testMultipleCalcs')) return suite if __name__ == '__main__': suite", "[['d1','d2'],['d1','d2']] self.aDict = {'Fe':{'d1':1,'d2':2},'Pt':{'d1':10,'d2':20}} self.pDict = {'d1':100.,'d2':200.} self.compos = [('Fe',1),('Pt',1)]", "\" for i in xrange(len(self.cExprs)): cExpr= self.cExprs[i] argVect = self.piece1", "# \"\"\" unit testing code for compound descriptors \"\"\" from" ]
[ "description=\"Dashboard to explore the data and to create baseline Machine", "License\", \"Programming Language :: Python :: 3\", \"Programming Language ::", "\"xgboost>=1.3.3\", \"lightgbm>=3.2.0\" ], package_data={ \"data_dashboard\": [\"static/*\", \"templates/*\", \"examples/*\"] }, project_urls={", "python_requires=\">=3.7\", install_requires=[ \"pandas>=1.2.3\", \"numpy>=1.19.5\", \"scipy>=1.6.1\", \"beautifulsoup4>=4.9.3\", \"scikit-learn>=0.24.1\", \"seaborn>=0.11.1\", \"bokeh>=2.3.0\", \"Jinja2>=2.11.3\",", "from setuptools import setup, find_packages import pathlib here = pathlib.Path(__file__).parent.resolve()", "\"Development Status :: 3 - Alpha\", \"Intended Audience :: Developers\",", "model.\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/maciek3000/data_dashboard\", author=\"<NAME>\", author_email=\"<EMAIL>\", classifiers=[ \"Development Status ::", ":: Developers\", \"Intended Audience :: Education\", \"License :: OSI Approved", "\"data_dashboard\"}, packages=find_packages(), python_requires=\">=3.7\", install_requires=[ \"pandas>=1.2.3\", \"numpy>=1.19.5\", \"scipy>=1.6.1\", \"beautifulsoup4>=4.9.3\", \"scikit-learn>=0.24.1\", \"seaborn>=0.11.1\",", "version=\"0.1.1\", description=\"Dashboard to explore the data and to create baseline", "\"Intended Audience :: Education\", \"License :: OSI Approved :: MIT", "Language :: Python :: 3\", \"Programming Language :: Python ::", "MIT License\", \"Programming Language :: Python :: 3\", \"Programming Language", ":: Scientific/Engineering :: Artificial Intelligence\", \"Topic :: Scientific/Engineering :: Visualization\"", "url=\"https://github.com/maciek3000/data_dashboard\", author=\"<NAME>\", author_email=\"<EMAIL>\", classifiers=[ \"Development Status :: 3 - Alpha\",", "Learning model.\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/maciek3000/data_dashboard\", 
author=\"<NAME>\", author_email=\"<EMAIL>\", classifiers=[ \"Development Status", ":: Visualization\" ], package_dir={\"data_dashboard\": \"data_dashboard\"}, packages=find_packages(), python_requires=\">=3.7\", install_requires=[ \"pandas>=1.2.3\", \"numpy>=1.19.5\",", "import pathlib here = pathlib.Path(__file__).parent.resolve() long_description = (here / \"readme.md\").read_text(encoding=\"utf-8\")", ":: 3 - Alpha\", \"Intended Audience :: Developers\", \"Intended Audience", "\"Jinja2>=2.11.3\", \"xgboost>=1.3.3\", \"lightgbm>=3.2.0\" ], package_data={ \"data_dashboard\": [\"static/*\", \"templates/*\", \"examples/*\"] },", "to explore the data and to create baseline Machine Learning", ":: 3.7\", \"Topic :: Scientific/Engineering :: Artificial Intelligence\", \"Topic ::", "\"numpy>=1.19.5\", \"scipy>=1.6.1\", \"beautifulsoup4>=4.9.3\", \"scikit-learn>=0.24.1\", \"seaborn>=0.11.1\", \"bokeh>=2.3.0\", \"Jinja2>=2.11.3\", \"xgboost>=1.3.3\", \"lightgbm>=3.2.0\" ],", ":: Scientific/Engineering :: Visualization\" ], package_dir={\"data_dashboard\": \"data_dashboard\"}, packages=find_packages(), python_requires=\">=3.7\", install_requires=[", "long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/maciek3000/data_dashboard\", author=\"<NAME>\", author_email=\"<EMAIL>\", classifiers=[ \"Development Status :: 3", "Language :: Python :: 3.7\", \"Topic :: Scientific/Engineering :: Artificial", "Approved :: MIT License\", \"Programming Language :: Python :: 3\",", "OSI Approved :: MIT License\", \"Programming Language :: Python ::", "Education\", \"License :: OSI Approved :: MIT License\", \"Programming Language", ":: Python :: 3\", \"Programming Language :: Python :: 3.7\",", "3\", \"Programming Language :: Python :: 3.7\", \"Topic :: Scientific/Engineering", "\"pandas>=1.2.3\", \"numpy>=1.19.5\", \"scipy>=1.6.1\", \"beautifulsoup4>=4.9.3\", \"scikit-learn>=0.24.1\", \"seaborn>=0.11.1\", \"bokeh>=2.3.0\", 
\"Jinja2>=2.11.3\", \"xgboost>=1.3.3\", \"lightgbm>=3.2.0\"", "Artificial Intelligence\", \"Topic :: Scientific/Engineering :: Visualization\" ], package_dir={\"data_dashboard\": \"data_dashboard\"},", "\"Programming Language :: Python :: 3\", \"Programming Language :: Python", "Scientific/Engineering :: Visualization\" ], package_dir={\"data_dashboard\": \"data_dashboard\"}, packages=find_packages(), python_requires=\">=3.7\", install_requires=[ \"pandas>=1.2.3\",", "packages=find_packages(), python_requires=\">=3.7\", install_requires=[ \"pandas>=1.2.3\", \"numpy>=1.19.5\", \"scipy>=1.6.1\", \"beautifulsoup4>=4.9.3\", \"scikit-learn>=0.24.1\", \"seaborn>=0.11.1\", \"bokeh>=2.3.0\",", "\"Programming Language :: Python :: 3.7\", \"Topic :: Scientific/Engineering ::", "to create baseline Machine Learning model.\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/maciek3000/data_dashboard\", author=\"<NAME>\",", "author=\"<NAME>\", author_email=\"<EMAIL>\", classifiers=[ \"Development Status :: 3 - Alpha\", \"Intended", ":: Education\", \"License :: OSI Approved :: MIT License\", \"Programming", "/ \"readme.md\").read_text(encoding=\"utf-8\") setup( name=\"data_dashboard\", version=\"0.1.1\", description=\"Dashboard to explore the data", "data and to create baseline Machine Learning model.\", long_description=long_description, long_description_content_type=\"text/markdown\",", "], package_data={ \"data_dashboard\": [\"static/*\", \"templates/*\", \"examples/*\"] }, project_urls={ \"Github\": \"https://github.com/maciek3000/data_dashboard\",", "Python :: 3\", \"Programming Language :: Python :: 3.7\", \"Topic", ":: Python :: 3.7\", \"Topic :: Scientific/Engineering :: Artificial Intelligence\",", "- Alpha\", \"Intended Audience :: Developers\", \"Intended Audience :: Education\",", ":: 3\", \"Programming Language :: Python :: 3.7\", \"Topic ::", "= pathlib.Path(__file__).parent.resolve() long_description = 
(here / \"readme.md\").read_text(encoding=\"utf-8\") setup( name=\"data_dashboard\", version=\"0.1.1\",", "long_description = (here / \"readme.md\").read_text(encoding=\"utf-8\") setup( name=\"data_dashboard\", version=\"0.1.1\", description=\"Dashboard to", "pathlib.Path(__file__).parent.resolve() long_description = (here / \"readme.md\").read_text(encoding=\"utf-8\") setup( name=\"data_dashboard\", version=\"0.1.1\", description=\"Dashboard", "classifiers=[ \"Development Status :: 3 - Alpha\", \"Intended Audience ::", "install_requires=[ \"pandas>=1.2.3\", \"numpy>=1.19.5\", \"scipy>=1.6.1\", \"beautifulsoup4>=4.9.3\", \"scikit-learn>=0.24.1\", \"seaborn>=0.11.1\", \"bokeh>=2.3.0\", \"Jinja2>=2.11.3\", \"xgboost>=1.3.3\",", "Visualization\" ], package_dir={\"data_dashboard\": \"data_dashboard\"}, packages=find_packages(), python_requires=\">=3.7\", install_requires=[ \"pandas>=1.2.3\", \"numpy>=1.19.5\", \"scipy>=1.6.1\",", ":: Artificial Intelligence\", \"Topic :: Scientific/Engineering :: Visualization\" ], package_dir={\"data_dashboard\":", "], package_dir={\"data_dashboard\": \"data_dashboard\"}, packages=find_packages(), python_requires=\">=3.7\", install_requires=[ \"pandas>=1.2.3\", \"numpy>=1.19.5\", \"scipy>=1.6.1\", \"beautifulsoup4>=4.9.3\",", ":: MIT License\", \"Programming Language :: Python :: 3\", \"Programming", "\"License :: OSI Approved :: MIT License\", \"Programming Language ::", "\"seaborn>=0.11.1\", \"bokeh>=2.3.0\", \"Jinja2>=2.11.3\", \"xgboost>=1.3.3\", \"lightgbm>=3.2.0\" ], package_data={ \"data_dashboard\": [\"static/*\", \"templates/*\",", "here = pathlib.Path(__file__).parent.resolve() long_description = (here / \"readme.md\").read_text(encoding=\"utf-8\") setup( name=\"data_dashboard\",", "find_packages import pathlib here = pathlib.Path(__file__).parent.resolve() long_description = (here /", "\"Intended Audience :: Developers\", \"Intended Audience :: Education\", \"License ::", "\"Topic :: Scientific/Engineering :: Artificial 
Intelligence\", \"Topic :: Scientific/Engineering ::", "Python :: 3.7\", \"Topic :: Scientific/Engineering :: Artificial Intelligence\", \"Topic", "\"scipy>=1.6.1\", \"beautifulsoup4>=4.9.3\", \"scikit-learn>=0.24.1\", \"seaborn>=0.11.1\", \"bokeh>=2.3.0\", \"Jinja2>=2.11.3\", \"xgboost>=1.3.3\", \"lightgbm>=3.2.0\" ], package_data={", "Scientific/Engineering :: Artificial Intelligence\", \"Topic :: Scientific/Engineering :: Visualization\" ],", "pathlib here = pathlib.Path(__file__).parent.resolve() long_description = (here / \"readme.md\").read_text(encoding=\"utf-8\") setup(", "(here / \"readme.md\").read_text(encoding=\"utf-8\") setup( name=\"data_dashboard\", version=\"0.1.1\", description=\"Dashboard to explore the", "Audience :: Education\", \"License :: OSI Approved :: MIT License\",", "Alpha\", \"Intended Audience :: Developers\", \"Intended Audience :: Education\", \"License", "baseline Machine Learning model.\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/maciek3000/data_dashboard\", author=\"<NAME>\", author_email=\"<EMAIL>\", classifiers=[", "setup, find_packages import pathlib here = pathlib.Path(__file__).parent.resolve() long_description = (here", "= (here / \"readme.md\").read_text(encoding=\"utf-8\") setup( name=\"data_dashboard\", version=\"0.1.1\", description=\"Dashboard to explore", "long_description_content_type=\"text/markdown\", url=\"https://github.com/maciek3000/data_dashboard\", author=\"<NAME>\", author_email=\"<EMAIL>\", classifiers=[ \"Development Status :: 3 -", "\"data_dashboard\": [\"static/*\", \"templates/*\", \"examples/*\"] }, project_urls={ \"Github\": \"https://github.com/maciek3000/data_dashboard\", }, )", "Intelligence\", \"Topic :: Scientific/Engineering :: Visualization\" ], package_dir={\"data_dashboard\": \"data_dashboard\"}, packages=find_packages(),", "setup( name=\"data_dashboard\", version=\"0.1.1\", description=\"Dashboard to explore the data and to", 
"Machine Learning model.\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/maciek3000/data_dashboard\", author=\"<NAME>\", author_email=\"<EMAIL>\", classifiers=[ \"Development", ":: OSI Approved :: MIT License\", \"Programming Language :: Python", "3.7\", \"Topic :: Scientific/Engineering :: Artificial Intelligence\", \"Topic :: Scientific/Engineering", "\"lightgbm>=3.2.0\" ], package_data={ \"data_dashboard\": [\"static/*\", \"templates/*\", \"examples/*\"] }, project_urls={ \"Github\":", "Status :: 3 - Alpha\", \"Intended Audience :: Developers\", \"Intended", "create baseline Machine Learning model.\", long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/maciek3000/data_dashboard\", author=\"<NAME>\", author_email=\"<EMAIL>\",", "package_data={ \"data_dashboard\": [\"static/*\", \"templates/*\", \"examples/*\"] }, project_urls={ \"Github\": \"https://github.com/maciek3000/data_dashboard\", },", "explore the data and to create baseline Machine Learning model.\",", "\"beautifulsoup4>=4.9.3\", \"scikit-learn>=0.24.1\", \"seaborn>=0.11.1\", \"bokeh>=2.3.0\", \"Jinja2>=2.11.3\", \"xgboost>=1.3.3\", \"lightgbm>=3.2.0\" ], package_data={ \"data_dashboard\":", "\"Topic :: Scientific/Engineering :: Visualization\" ], package_dir={\"data_dashboard\": \"data_dashboard\"}, packages=find_packages(), python_requires=\">=3.7\",", "package_dir={\"data_dashboard\": \"data_dashboard\"}, packages=find_packages(), python_requires=\">=3.7\", install_requires=[ \"pandas>=1.2.3\", \"numpy>=1.19.5\", \"scipy>=1.6.1\", \"beautifulsoup4>=4.9.3\", \"scikit-learn>=0.24.1\",", "author_email=\"<EMAIL>\", classifiers=[ \"Development Status :: 3 - Alpha\", \"Intended Audience", "\"readme.md\").read_text(encoding=\"utf-8\") setup( name=\"data_dashboard\", version=\"0.1.1\", description=\"Dashboard to explore the data and", "and to create baseline Machine Learning model.\", 
long_description=long_description, long_description_content_type=\"text/markdown\", url=\"https://github.com/maciek3000/data_dashboard\",", "\"scikit-learn>=0.24.1\", \"seaborn>=0.11.1\", \"bokeh>=2.3.0\", \"Jinja2>=2.11.3\", \"xgboost>=1.3.3\", \"lightgbm>=3.2.0\" ], package_data={ \"data_dashboard\": [\"static/*\",", "3 - Alpha\", \"Intended Audience :: Developers\", \"Intended Audience ::", "\"bokeh>=2.3.0\", \"Jinja2>=2.11.3\", \"xgboost>=1.3.3\", \"lightgbm>=3.2.0\" ], package_data={ \"data_dashboard\": [\"static/*\", \"templates/*\", \"examples/*\"]", "name=\"data_dashboard\", version=\"0.1.1\", description=\"Dashboard to explore the data and to create", "Audience :: Developers\", \"Intended Audience :: Education\", \"License :: OSI", "Developers\", \"Intended Audience :: Education\", \"License :: OSI Approved ::", "the data and to create baseline Machine Learning model.\", long_description=long_description,", "import setup, find_packages import pathlib here = pathlib.Path(__file__).parent.resolve() long_description =", "setuptools import setup, find_packages import pathlib here = pathlib.Path(__file__).parent.resolve() long_description" ]
[ "resp[\"Reservations\"][0][\"Instances\"][0] except Exception as e: logging.debug(e) finally: return instance def", "from e2e import service_marker, CRD_GROUP, CRD_VERSION, load_ec2_resource from e2e.replacement_values import", "= k8s.delete_custom_resource(ref, 3, 10) assert deleted except: pass @service_marker @pytest.mark.canary", "_, deleted = k8s.delete_custom_resource(ref, 3, 10) assert deleted except: pass", "instance_state = instance[\"State\"][\"Name\"] except Exception as e: logging.debug(e) finally: return", "if (t['Key'] == INSTANCE_TAG_KEY and t['Value'] == INSTANCE_TAG_VAL): tag_present =", "import pytest import time import logging from acktest.resources import random_suffix_name", "from acktest.resources import random_suffix_name from acktest.k8s import resource as k8s", "= instance[\"Tags\"] tag_present = False for t in instance_tags: if", "True: now = datetime.datetime.now() timeout = now + datetime.timedelta(seconds=timeout_sec) if", "resource_data) cr = k8s.wait_resource_consumed_by_controller(ref) assert cr is not None assert", "under the Apache License, Version 2.0 (the \"License\"). You may", "of the # License is located at # # http://aws.amazon.com/apache2.0/", "# not use this file except in compliance with the", "REPLACEMENT_VALUES.copy() resource_name = random_suffix_name(\"instance-ack-test\", 24) test_vpc = get_bootstrap_resources().SharedTestVPC subnet_id =", "state\") time.sleep(DELETE_WAIT_AFTER_SECONDS) instance_state = get_instance_state(ec2_client, instance_id) if instance_state == desired_state:", "License. 
A copy of the # License is located at", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "resource_id) assert instance is not None # Give time for", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "State needs to be 'terminated' in order to remove the", "k8s.wait_resource_consumed_by_controller(ref) assert cr is not None assert k8s.get_resource_exists(ref) yield (ref,", ">= timeout: pytest.fail(f\"Timed out waiting for Instance to enter {desired_state}", "type for deterministic testing INSTANCE_TYPE = \"m4.large\" INSTANCE_AMI = \"Amazon", "instance_tags: if (t['Key'] == INSTANCE_TAG_KEY and t['Value'] == INSTANCE_TAG_VAL): tag_present", "ami_id test_resource_values[\"INSTANCE_TYPE\"] = INSTANCE_TYPE test_resource_values[\"INSTANCE_SUBNET_ID\"] = subnet_id test_resource_values[\"INSTANCE_TAG_KEY\"] = INSTANCE_TAG_KEY", "= subnet_id test_resource_values[\"INSTANCE_TAG_KEY\"] = INSTANCE_TAG_KEY test_resource_values[\"INSTANCE_TAG_VAL\"] = INSTANCE_TAG_VAL # Load", "= resource_name test_resource_values[\"INSTANCE_AMI_ID\"] = ami_id test_resource_values[\"INSTANCE_TYPE\"] = INSTANCE_TYPE test_resource_values[\"INSTANCE_SUBNET_ID\"] =", "instance type for deterministic testing INSTANCE_TYPE = \"m4.large\" INSTANCE_AMI =", "get_instance_state(ec2_client, instance_id): instance_state = None try: instance = get_instance(ec2_client, instance_id)", "\"ack-controller\" CREATE_WAIT_AFTER_SECONDS = 10 DELETE_WAIT_AFTER_SECONDS = 10 TIMEOUT_SECONDS = 300", "instance when tests complete try: _, deleted = k8s.delete_custom_resource(ref, 3,", "except Exception as e: logging.debug(e) @pytest.fixture def instance(ec2_client): test_resource_values =", "Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
#", "subnet_id = test_vpc.public_subnets.subnet_ids[0] ami_id = get_ami_id(ec2_client) test_resource_values[\"INSTANCE_NAME\"] = resource_name test_resource_values[\"INSTANCE_AMI_ID\"]", "get_ami_id(ec2_client) test_resource_values[\"INSTANCE_NAME\"] = resource_name test_resource_values[\"INSTANCE_AMI_ID\"] = ami_id test_resource_values[\"INSTANCE_TYPE\"] = INSTANCE_TYPE", "except Exception as e: logging.debug(e) finally: return instance def get_instance_state(ec2_client,", "= REPLACEMENT_VALUES.copy() resource_name = random_suffix_name(\"instance-ack-test\", 24) test_vpc = get_bootstrap_resources().SharedTestVPC subnet_id", "10 TIMEOUT_SECONDS = 300 def get_instance(ec2_client, instance_id: str) -> dict:", "instance to come up wait_for_instance_or_die(ec2_client, resource_id, 'running', TIMEOUT_SECONDS) # Validate", "try: instance = get_instance(ec2_client, instance_id) instance_state = instance[\"State\"][\"Name\"] except Exception", "resource_id, 'running', TIMEOUT_SECONDS) # Validate instance tags instance_tags = instance[\"Tags\"]", "= k8s.delete_custom_resource(ref, 2, 5) assert deleted is True # Reservation", "Reserved. # # Licensed under the Apache License, Version 2.0", "dependency on the shared subnet # for successful test cleanup", "k8s from e2e import service_marker, CRD_GROUP, CRD_VERSION, load_ec2_resource from e2e.replacement_values", "the Apache License, Version 2.0 (the \"License\"). You may #", "\"License\"). 
You may # not use this file except in", "resource_name test_resource_values[\"INSTANCE_AMI_ID\"] = ami_id test_resource_values[\"INSTANCE_TYPE\"] = INSTANCE_TYPE test_resource_values[\"INSTANCE_SUBNET_ID\"] = subnet_id", "logging from acktest.resources import random_suffix_name from acktest.k8s import resource as", "not None assert k8s.get_resource_exists(ref) yield (ref, cr) # Delete the", "instance def get_instance_state(ec2_client, instance_id): instance_state = None try: instance =", "yield (ref, cr) # Delete the instance when tests complete", "License is located at # # http://aws.amazon.com/apache2.0/ # # or", "\"virtualization-type\", \"Values\": ['hvm']}, ], ) for image in resp['Images']: if", "= get_bootstrap_resources().SharedTestVPC subnet_id = test_vpc.public_subnets.subnet_ids[0] ami_id = get_ami_id(ec2_client) test_resource_values[\"INSTANCE_NAME\"] =", "Instance API. \"\"\" import datetime import pytest import time import", "break def get_ami_id(ec2_client): try: # Use latest AL2 resp =", "datetime.datetime.now() >= timeout: pytest.fail(f\"Timed out waiting for Instance to enter", "k8s.create_custom_resource(ref, resource_data) cr = k8s.wait_resource_consumed_by_controller(ref) assert cr is not None", "enter {desired_state} state\") time.sleep(DELETE_WAIT_AFTER_SECONDS) instance_state = get_instance_state(ec2_client, instance_id) if instance_state", "image in resp['Images']: if 'Description' in image: if INSTANCE_AMI in", "= datetime.datetime.now() timeout = now + datetime.timedelta(seconds=timeout_sec) if datetime.datetime.now() >=", "resource ref = k8s.CustomResourceReference( CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL, resource_name, namespace=\"default\", )", "= load_ec2_resource( \"instance\", additional_replacements=test_resource_values, ) logging.debug(resource_data) # Create k8s resource", "INSTANCE_TAG_VAL = \"ack-controller\" CREATE_WAIT_AFTER_SECONDS = 10 DELETE_WAIT_AFTER_SECONDS = 10 TIMEOUT_SECONDS", "deterministic testing INSTANCE_TYPE = 
\"m4.large\" INSTANCE_AMI = \"Amazon Linux 2", "{\"Name\": \"virtualization-type\", \"Values\": ['hvm']}, ], ) for image in resp['Images']:", "in resp['Images']: if 'Description' in image: if INSTANCE_AMI in image['Description']:", "CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL, resource_name, namespace=\"default\", ) k8s.create_custom_resource(ref, resource_data) cr =", "will commence termination # State needs to be 'terminated' in", "pytest.fail(f\"Timed out waiting for Instance to enter {desired_state} state\") time.sleep(DELETE_WAIT_AFTER_SECONDS)", "instance_tags = instance[\"Tags\"] tag_present = False for t in instance_tags:", "= INSTANCE_TYPE test_resource_values[\"INSTANCE_SUBNET_ID\"] = subnet_id test_resource_values[\"INSTANCE_TAG_KEY\"] = INSTANCE_TAG_KEY test_resource_values[\"INSTANCE_TAG_VAL\"] =", "INSTANCE_TAG_KEY and t['Value'] == INSTANCE_TAG_VAL): tag_present = True assert tag_present", "Inc. or its affiliates. All Rights Reserved. # # Licensed", ") logging.debug(resource_data) # Create k8s resource ref = k8s.CustomResourceReference( CRD_GROUP,", "random_suffix_name(\"instance-ack-test\", 24) test_vpc = get_bootstrap_resources().SharedTestVPC subnet_id = test_vpc.public_subnets.subnet_ids[0] ami_id =", "+ datetime.timedelta(seconds=timeout_sec) if datetime.datetime.now() >= timeout: pytest.fail(f\"Timed out waiting for", "instance_state def wait_for_instance_or_die(ec2_client, instance_id, desired_state, timeout_sec): while True: now =", "ANY KIND, either # express or implied. See the License", "None try: instance = get_instance(ec2_client, instance_id) instance_state = instance[\"State\"][\"Name\"] except", "Linux 2 Kernel\" INSTANCE_TAG_KEY = \"owner\" INSTANCE_TAG_VAL = \"ack-controller\" CREATE_WAIT_AFTER_SECONDS", "= \"m4.large\" INSTANCE_AMI = \"Amazon Linux 2 Kernel\" INSTANCE_TAG_KEY =", "the \"license\" file accompanying this file. 
This file is distributed", "(t['Key'] == INSTANCE_TAG_KEY and t['Value'] == INSTANCE_TAG_VAL): tag_present = True", "in image['Description']: return image['ImageId'] except Exception as e: logging.debug(e) @pytest.fixture", "OF ANY KIND, either # express or implied. See the", "# Reservation still exists, but instance will commence termination #", "file except in compliance with the License. A copy of", "Instance exists instance = get_instance(ec2_client, resource_id) assert instance is not", "cr) # Delete the instance when tests complete try: _,", "2, 5) assert deleted is True # Reservation still exists,", "CREATE_WAIT_AFTER_SECONDS = 10 DELETE_WAIT_AFTER_SECONDS = 10 TIMEOUT_SECONDS = 300 def", "\"architecture\", \"Values\": ['x86_64']}, {\"Name\": \"state\", \"Values\": ['available']}, {\"Name\": \"virtualization-type\", \"Values\":", "logging.debug(e) @pytest.fixture def instance(ec2_client): test_resource_values = REPLACEMENT_VALUES.copy() resource_name = random_suffix_name(\"instance-ack-test\",", "def get_instance(ec2_client, instance_id: str) -> dict: instance = None try:", "get_instance(ec2_client, resource_id) assert instance is not None # Give time", "for instance to come up wait_for_instance_or_die(ec2_client, resource_id, 'running', TIMEOUT_SECONDS) #", "Use latest AL2 resp = ec2_client.describe_images( Owners=['amazon'], Filters=[ {\"Name\": \"architecture\",", "instance_id): instance_state = None try: instance = get_instance(ec2_client, instance_id) instance_state", "the dependency on the shared subnet # for successful test", "instance = get_instance(ec2_client, instance_id) instance_state = instance[\"State\"][\"Name\"] except Exception as", "\"state\", \"Values\": ['available']}, {\"Name\": \"virtualization-type\", \"Values\": ['hvm']}, ], ) for", "You may # not use this file except in compliance", "wait_for_instance_or_die(ec2_client, resource_id, 'running', TIMEOUT_SECONDS) # Validate instance tags instance_tags =", "assert cr is not None assert 
k8s.get_resource_exists(ref) yield (ref, cr)", "Kernel\" INSTANCE_TAG_KEY = \"owner\" INSTANCE_TAG_VAL = \"ack-controller\" CREATE_WAIT_AFTER_SECONDS = 10", "cr is not None assert k8s.get_resource_exists(ref) yield (ref, cr) #", "time for instance to come up wait_for_instance_or_die(ec2_client, resource_id, 'running', TIMEOUT_SECONDS)", "namespace=\"default\", ) k8s.create_custom_resource(ref, resource_data) cr = k8s.wait_resource_consumed_by_controller(ref) assert cr is", "from acktest.k8s import resource as k8s from e2e import service_marker,", "= 10 TIMEOUT_SECONDS = 300 def get_instance(ec2_client, instance_id: str) ->", ") instance = resp[\"Reservations\"][0][\"Instances\"][0] except Exception as e: logging.debug(e) finally:", "subnet_id test_resource_values[\"INSTANCE_TAG_KEY\"] = INSTANCE_TAG_KEY test_resource_values[\"INSTANCE_TAG_VAL\"] = INSTANCE_TAG_VAL # Load Instance", "assert k8s.get_resource_exists(ref) yield (ref, cr) # Delete the instance when", "Exception as e: logging.debug(e) finally: return instance_state def wait_for_instance_or_die(ec2_client, instance_id,", "instance = None try: resp = ec2_client.describe_instances( InstanceIds=[instance_id] ) instance", "INSTANCE_AMI in image['Description']: return image['ImageId'] except Exception as e: logging.debug(e)", "All Rights Reserved. # # Licensed under the Apache License,", "@pytest.fixture def instance(ec2_client): test_resource_values = REPLACEMENT_VALUES.copy() resource_name = random_suffix_name(\"instance-ack-test\", 24)", "if instance_state == desired_state: break def get_ami_id(ec2_client): try: # Use", "t['Value'] == INSTANCE_TAG_VAL): tag_present = True assert tag_present # Delete", "up wait_for_instance_or_die(ec2_client, resource_id, 'running', TIMEOUT_SECONDS) # Validate instance tags instance_tags", "copy of the # License is located at # #", "under the License. \"\"\"Integration tests for Instance API. \"\"\" import", "permissions and limitations under the License. 
\"\"\"Integration tests for Instance", "as e: logging.debug(e) finally: return instance_state def wait_for_instance_or_die(ec2_client, instance_id, desired_state,", "logging.debug(resource_data) # Create k8s resource ref = k8s.CustomResourceReference( CRD_GROUP, CRD_VERSION,", "# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.", "str) -> dict: instance = None try: resp = ec2_client.describe_instances(", "False for t in instance_tags: if (t['Key'] == INSTANCE_TAG_KEY and", "timeout: pytest.fail(f\"Timed out waiting for Instance to enter {desired_state} state\")", "instance_state == desired_state: break def get_ami_id(ec2_client): try: # Use latest", "= k8s.wait_resource_consumed_by_controller(ref) assert cr is not None assert k8s.get_resource_exists(ref) yield", "tag_present # Delete k8s resource _, deleted = k8s.delete_custom_resource(ref, 2,", "not use this file except in compliance with the License.", "cr) = instance resource_id = cr[\"status\"][\"instanceID\"] time.sleep(CREATE_WAIT_AFTER_SECONDS) # Check Instance", "= cr[\"status\"][\"instanceID\"] time.sleep(CREATE_WAIT_AFTER_SECONDS) # Check Instance exists instance = get_instance(ec2_client,", "CRD_VERSION, RESOURCE_PLURAL, resource_name, namespace=\"default\", ) k8s.create_custom_resource(ref, resource_data) cr = k8s.wait_resource_consumed_by_controller(ref)", "get_bootstrap_resources().SharedTestVPC subnet_id = test_vpc.public_subnets.subnet_ids[0] ami_id = get_ami_id(ec2_client) test_resource_values[\"INSTANCE_NAME\"] = resource_name", "TIMEOUT_SECONDS) # Validate instance tags instance_tags = instance[\"Tags\"] tag_present =", "= k8s.CustomResourceReference( CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL, resource_name, namespace=\"default\", ) k8s.create_custom_resource(ref, resource_data)", "== INSTANCE_TAG_KEY and t['Value'] == INSTANCE_TAG_VAL): tag_present = True assert", "RESOURCE_PLURAL, resource_name, namespace=\"default\", ) k8s.create_custom_resource(ref, resource_data) cr = 
k8s.wait_resource_consumed_by_controller(ref) assert", "ec2_client.describe_instances( InstanceIds=[instance_id] ) instance = resp[\"Reservations\"][0][\"Instances\"][0] except Exception as e:", "# Use latest AL2 resp = ec2_client.describe_images( Owners=['amazon'], Filters=[ {\"Name\":", "if datetime.datetime.now() >= timeout: pytest.fail(f\"Timed out waiting for Instance to", "License for the specific language governing # permissions and limitations", "= get_instance(ec2_client, instance_id) instance_state = instance[\"State\"][\"Name\"] except Exception as e:", "\"instance\", additional_replacements=test_resource_values, ) logging.debug(resource_data) # Create k8s resource ref =", "None # Give time for instance to come up wait_for_instance_or_die(ec2_client,", "and t['Value'] == INSTANCE_TAG_VAL): tag_present = True assert tag_present #", "try: _, deleted = k8s.delete_custom_resource(ref, 3, 10) assert deleted except:", "A copy of the # License is located at #", "import datetime import pytest import time import logging from acktest.resources", "Check Instance exists instance = get_instance(ec2_client, resource_id) assert instance is", "# Give time for instance to come up wait_for_instance_or_die(ec2_client, resource_id,", "INSTANCE_TAG_VAL # Load Instance CR resource_data = load_ec2_resource( \"instance\", additional_replacements=test_resource_values,", "= None try: resp = ec2_client.describe_instances( InstanceIds=[instance_id] ) instance =", "TIMEOUT_SECONDS = 300 def get_instance(ec2_client, instance_id: str) -> dict: instance", "image: if INSTANCE_AMI in image['Description']: return image['ImageId'] except Exception as", "(the \"License\"). 
You may # not use this file except", "latest AL2 resp = ec2_client.describe_images( Owners=['amazon'], Filters=[ {\"Name\": \"architecture\", \"Values\":", "# Delete k8s resource _, deleted = k8s.delete_custom_resource(ref, 2, 5)", "= \"Amazon Linux 2 Kernel\" INSTANCE_TAG_KEY = \"owner\" INSTANCE_TAG_VAL =", "= get_ami_id(ec2_client) test_resource_values[\"INSTANCE_NAME\"] = resource_name test_resource_values[\"INSTANCE_AMI_ID\"] = ami_id test_resource_values[\"INSTANCE_TYPE\"] =", "# Licensed under the Apache License, Version 2.0 (the \"License\").", "resource_data = load_ec2_resource( \"instance\", additional_replacements=test_resource_values, ) logging.debug(resource_data) # Create k8s", "= \"owner\" INSTANCE_TAG_VAL = \"ack-controller\" CREATE_WAIT_AFTER_SECONDS = 10 DELETE_WAIT_AFTER_SECONDS =", "the # License is located at # # http://aws.amazon.com/apache2.0/ #", "API. \"\"\" import datetime import pytest import time import logging", "this file except in compliance with the License. A copy", "k8s.CustomResourceReference( CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL, resource_name, namespace=\"default\", ) k8s.create_custom_resource(ref, resource_data) cr", "specific language governing # permissions and limitations under the License.", "tag_present = False for t in instance_tags: if (t['Key'] ==", "the License. A copy of the # License is located", "datetime import pytest import time import logging from acktest.resources import", "CR resource_data = load_ec2_resource( \"instance\", additional_replacements=test_resource_values, ) logging.debug(resource_data) # Create", "tests complete try: _, deleted = k8s.delete_custom_resource(ref, 3, 10) assert", "and limitations under the License. 
\"\"\"Integration tests for Instance API.", "@pytest.mark.canary class TestInstance: def test_create_delete(self, ec2_client, instance): (ref, cr) =", "needs to be 'terminated' in order to remove the dependency", "is located at # # http://aws.amazon.com/apache2.0/ # # or in", "governing # permissions and limitations under the License. \"\"\"Integration tests", "See the License for the specific language governing # permissions", "TestInstance: def test_create_delete(self, ec2_client, instance): (ref, cr) = instance resource_id", "or its affiliates. All Rights Reserved. # # Licensed under", "test_resource_values[\"INSTANCE_TYPE\"] = INSTANCE_TYPE test_resource_values[\"INSTANCE_SUBNET_ID\"] = subnet_id test_resource_values[\"INSTANCE_TAG_KEY\"] = INSTANCE_TAG_KEY test_resource_values[\"INSTANCE_TAG_VAL\"]", "tags instance_tags = instance[\"Tags\"] tag_present = False for t in", "now = datetime.datetime.now() timeout = now + datetime.timedelta(seconds=timeout_sec) if datetime.datetime.now()", "for Instance API. \"\"\" import datetime import pytest import time", "= None try: instance = get_instance(ec2_client, instance_id) instance_state = instance[\"State\"][\"Name\"]", "Validate instance tags instance_tags = instance[\"Tags\"] tag_present = False for", "def get_ami_id(ec2_client): try: # Use latest AL2 resp = ec2_client.describe_images(", "DELETE_WAIT_AFTER_SECONDS = 10 TIMEOUT_SECONDS = 300 def get_instance(ec2_client, instance_id: str)", "instance_id: str) -> dict: instance = None try: resp =", "2.0 (the \"License\"). You may # not use this file", "from e2e.bootstrap_resources import get_bootstrap_resources RESOURCE_PLURAL = \"instances\" # highly available", "or implied. See the License for the specific language governing", "instance = get_instance(ec2_client, resource_id) assert instance is not None #", "in compliance with the License. 
A copy of the #", "if 'Description' in image: if INSTANCE_AMI in image['Description']: return image['ImageId']", "k8s resource ref = k8s.CustomResourceReference( CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL, resource_name, namespace=\"default\",", "instance = resp[\"Reservations\"][0][\"Instances\"][0] except Exception as e: logging.debug(e) finally: return", "= INSTANCE_TAG_KEY test_resource_values[\"INSTANCE_TAG_VAL\"] = INSTANCE_TAG_VAL # Load Instance CR resource_data", "InstanceIds=[instance_id] ) instance = resp[\"Reservations\"][0][\"Instances\"][0] except Exception as e: logging.debug(e)", "test_resource_values[\"INSTANCE_NAME\"] = resource_name test_resource_values[\"INSTANCE_AMI_ID\"] = ami_id test_resource_values[\"INSTANCE_TYPE\"] = INSTANCE_TYPE test_resource_values[\"INSTANCE_SUBNET_ID\"]", "time.sleep(DELETE_WAIT_AFTER_SECONDS) instance_state = get_instance_state(ec2_client, instance_id) if instance_state == desired_state: break", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "# Load Instance CR resource_data = load_ec2_resource( \"instance\", additional_replacements=test_resource_values, )", "== INSTANCE_TAG_VAL): tag_present = True assert tag_present # Delete k8s", "= INSTANCE_TAG_VAL # Load Instance CR resource_data = load_ec2_resource( \"instance\",", "exists instance = get_instance(ec2_client, resource_id) assert instance is not None", "Instance CR resource_data = load_ec2_resource( \"instance\", additional_replacements=test_resource_values, ) logging.debug(resource_data) #", "resource_name = random_suffix_name(\"instance-ack-test\", 24) test_vpc = get_bootstrap_resources().SharedTestVPC subnet_id = test_vpc.public_subnets.subnet_ids[0]", "as e: logging.debug(e) finally: return instance def get_instance_state(ec2_client, instance_id): instance_state", "resp = ec2_client.describe_images( Owners=['amazon'], Filters=[ {\"Name\": \"architecture\", \"Values\": ['x86_64']}, {\"Name\":", "deleted = k8s.delete_custom_resource(ref, 2, 5) 
assert deleted is True #", "as e: logging.debug(e) @pytest.fixture def instance(ec2_client): test_resource_values = REPLACEMENT_VALUES.copy() resource_name", "e: logging.debug(e) @pytest.fixture def instance(ec2_client): test_resource_values = REPLACEMENT_VALUES.copy() resource_name =", "out waiting for Instance to enter {desired_state} state\") time.sleep(DELETE_WAIT_AFTER_SECONDS) instance_state", ") k8s.create_custom_resource(ref, resource_data) cr = k8s.wait_resource_consumed_by_controller(ref) assert cr is not", "use this file except in compliance with the License. A", "be 'terminated' in order to remove the dependency on the", "http://aws.amazon.com/apache2.0/ # # or in the \"license\" file accompanying this", "to remove the dependency on the shared subnet # for", "accompanying this file. This file is distributed # on an", "OR CONDITIONS OF ANY KIND, either # express or implied.", "= False for t in instance_tags: if (t['Key'] == INSTANCE_TAG_KEY", "time import logging from acktest.resources import random_suffix_name from acktest.k8s import", "the instance when tests complete try: _, deleted = k8s.delete_custom_resource(ref,", "INSTANCE_AMI = \"Amazon Linux 2 Kernel\" INSTANCE_TAG_KEY = \"owner\" INSTANCE_TAG_VAL", "10 DELETE_WAIT_AFTER_SECONDS = 10 TIMEOUT_SECONDS = 300 def get_instance(ec2_client, instance_id:", "e2e.replacement_values import REPLACEMENT_VALUES from e2e.bootstrap_resources import get_bootstrap_resources RESOURCE_PLURAL = \"instances\"", "import service_marker, CRD_GROUP, CRD_VERSION, load_ec2_resource from e2e.replacement_values import REPLACEMENT_VALUES from", "datetime.datetime.now() timeout = now + datetime.timedelta(seconds=timeout_sec) if datetime.datetime.now() >= timeout:", "try: resp = ec2_client.describe_instances( InstanceIds=[instance_id] ) instance = resp[\"Reservations\"][0][\"Instances\"][0] except", "e: logging.debug(e) finally: return instance_state def wait_for_instance_or_die(ec2_client, instance_id, desired_state, 
timeout_sec):", "test_resource_values = REPLACEMENT_VALUES.copy() resource_name = random_suffix_name(\"instance-ack-test\", 24) test_vpc = get_bootstrap_resources().SharedTestVPC", "import resource as k8s from e2e import service_marker, CRD_GROUP, CRD_VERSION,", "limitations under the License. \"\"\"Integration tests for Instance API. \"\"\"", "k8s.delete_custom_resource(ref, 2, 5) assert deleted is True # Reservation still", "shared subnet # for successful test cleanup wait_for_instance_or_die(ec2_client, resource_id, 'terminated',", "instance[\"Tags\"] tag_present = False for t in instance_tags: if (t['Key']", "], ) for image in resp['Images']: if 'Description' in image:", "# License is located at # # http://aws.amazon.com/apache2.0/ # #", "timeout_sec): while True: now = datetime.datetime.now() timeout = now +", "# # Licensed under the Apache License, Version 2.0 (the", "if INSTANCE_AMI in image['Description']: return image['ImageId'] except Exception as e:", "test_vpc = get_bootstrap_resources().SharedTestVPC subnet_id = test_vpc.public_subnets.subnet_ids[0] ami_id = get_ami_id(ec2_client) test_resource_values[\"INSTANCE_NAME\"]", "\"Amazon Linux 2 Kernel\" INSTANCE_TAG_KEY = \"owner\" INSTANCE_TAG_VAL = \"ack-controller\"", "import REPLACEMENT_VALUES from e2e.bootstrap_resources import get_bootstrap_resources RESOURCE_PLURAL = \"instances\" #", "Filters=[ {\"Name\": \"architecture\", \"Values\": ['x86_64']}, {\"Name\": \"state\", \"Values\": ['available']}, {\"Name\":", "Give time for instance to come up wait_for_instance_or_die(ec2_client, resource_id, 'running',", "test_resource_values[\"INSTANCE_TAG_KEY\"] = INSTANCE_TAG_KEY test_resource_values[\"INSTANCE_TAG_VAL\"] = INSTANCE_TAG_VAL # Load Instance CR", "logging.debug(e) finally: return instance_state def wait_for_instance_or_die(ec2_client, instance_id, desired_state, timeout_sec): while", "True # Reservation still exists, but instance will commence termination", "Reservation still exists, but instance 
will commence termination # State", "as k8s from e2e import service_marker, CRD_GROUP, CRD_VERSION, load_ec2_resource from", "# Check Instance exists instance = get_instance(ec2_client, resource_id) assert instance", "file. This file is distributed # on an \"AS IS\"", "resp['Images']: if 'Description' in image: if INSTANCE_AMI in image['Description']: return", ") for image in resp['Images']: if 'Description' in image: if", "10) assert deleted except: pass @service_marker @pytest.mark.canary class TestInstance: def", "AL2 resp = ec2_client.describe_images( Owners=['amazon'], Filters=[ {\"Name\": \"architecture\", \"Values\": ['x86_64']},", "from e2e.replacement_values import REPLACEMENT_VALUES from e2e.bootstrap_resources import get_bootstrap_resources RESOURCE_PLURAL =", "test_resource_values[\"INSTANCE_SUBNET_ID\"] = subnet_id test_resource_values[\"INSTANCE_TAG_KEY\"] = INSTANCE_TAG_KEY test_resource_values[\"INSTANCE_TAG_VAL\"] = INSTANCE_TAG_VAL #", "instance resource_id = cr[\"status\"][\"instanceID\"] time.sleep(CREATE_WAIT_AFTER_SECONDS) # Check Instance exists instance", "'terminated' in order to remove the dependency on the shared", "# # http://aws.amazon.com/apache2.0/ # # or in the \"license\" file", "termination # State needs to be 'terminated' in order to", "in image: if INSTANCE_AMI in image['Description']: return image['ImageId'] except Exception", "commence termination # State needs to be 'terminated' in order", "either # express or implied. 
See the License for the", "finally: return instance def get_instance_state(ec2_client, instance_id): instance_state = None try:", "get_instance(ec2_client, instance_id) instance_state = instance[\"State\"][\"Name\"] except Exception as e: logging.debug(e)", "desired_state: break def get_ami_id(ec2_client): try: # Use latest AL2 resp", "INSTANCE_TYPE = \"m4.large\" INSTANCE_AMI = \"Amazon Linux 2 Kernel\" INSTANCE_TAG_KEY", "2 Kernel\" INSTANCE_TAG_KEY = \"owner\" INSTANCE_TAG_VAL = \"ack-controller\" CREATE_WAIT_AFTER_SECONDS =", "# # or in the \"license\" file accompanying this file.", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either # express", "exists, but instance will commence termination # State needs to", "WARRANTIES OR CONDITIONS OF ANY KIND, either # express or", "return instance_state def wait_for_instance_or_die(ec2_client, instance_id, desired_state, timeout_sec): while True: now", "k8s.get_resource_exists(ref) yield (ref, cr) # Delete the instance when tests", "instance will commence termination # State needs to be 'terminated'", "acktest.resources import random_suffix_name from acktest.k8s import resource as k8s from", "import get_bootstrap_resources RESOURCE_PLURAL = \"instances\" # highly available instance type", "instance(ec2_client): test_resource_values = REPLACEMENT_VALUES.copy() resource_name = random_suffix_name(\"instance-ack-test\", 24) test_vpc =", "except: pass @service_marker @pytest.mark.canary class TestInstance: def test_create_delete(self, ec2_client, instance):", "Rights Reserved. 
# # Licensed under the Apache License, Version", "resource_name, namespace=\"default\", ) k8s.create_custom_resource(ref, resource_data) cr = k8s.wait_resource_consumed_by_controller(ref) assert cr", "= 300 def get_instance(ec2_client, instance_id: str) -> dict: instance =", "This file is distributed # on an \"AS IS\" BASIS,", "for t in instance_tags: if (t['Key'] == INSTANCE_TAG_KEY and t['Value']", "deleted is True # Reservation still exists, but instance will", "with the License. A copy of the # License is", "language governing # permissions and limitations under the License. \"\"\"Integration", "cr = k8s.wait_resource_consumed_by_controller(ref) assert cr is not None assert k8s.get_resource_exists(ref)", "@service_marker @pytest.mark.canary class TestInstance: def test_create_delete(self, ec2_client, instance): (ref, cr)", "test_resource_values[\"INSTANCE_AMI_ID\"] = ami_id test_resource_values[\"INSTANCE_TYPE\"] = INSTANCE_TYPE test_resource_values[\"INSTANCE_SUBNET_ID\"] = subnet_id test_resource_values[\"INSTANCE_TAG_KEY\"]", "REPLACEMENT_VALUES from e2e.bootstrap_resources import get_bootstrap_resources RESOURCE_PLURAL = \"instances\" # highly", "ec2_client.describe_images( Owners=['amazon'], Filters=[ {\"Name\": \"architecture\", \"Values\": ['x86_64']}, {\"Name\": \"state\", \"Values\":", "# highly available instance type for deterministic testing INSTANCE_TYPE =", "resource as k8s from e2e import service_marker, CRD_GROUP, CRD_VERSION, load_ec2_resource", "= ami_id test_resource_values[\"INSTANCE_TYPE\"] = INSTANCE_TYPE test_resource_values[\"INSTANCE_SUBNET_ID\"] = subnet_id test_resource_values[\"INSTANCE_TAG_KEY\"] =", "'running', TIMEOUT_SECONDS) # Validate instance tags instance_tags = instance[\"Tags\"] tag_present", "== desired_state: break def get_ami_id(ec2_client): try: # Use latest AL2", "pass @service_marker @pytest.mark.canary class TestInstance: def test_create_delete(self, ec2_client, instance): (ref,", "Version 2.0 (the \"License\"). 
You may # not use this", "wait_for_instance_or_die(ec2_client, instance_id, desired_state, timeout_sec): while True: now = datetime.datetime.now() timeout", "in instance_tags: if (t['Key'] == INSTANCE_TAG_KEY and t['Value'] == INSTANCE_TAG_VAL):", "'Description' in image: if INSTANCE_AMI in image['Description']: return image['ImageId'] except", "tag_present = True assert tag_present # Delete k8s resource _,", "INSTANCE_TAG_KEY = \"owner\" INSTANCE_TAG_VAL = \"ack-controller\" CREATE_WAIT_AFTER_SECONDS = 10 DELETE_WAIT_AFTER_SECONDS", "order to remove the dependency on the shared subnet #", "Exception as e: logging.debug(e) @pytest.fixture def instance(ec2_client): test_resource_values = REPLACEMENT_VALUES.copy()", "24) test_vpc = get_bootstrap_resources().SharedTestVPC subnet_id = test_vpc.public_subnets.subnet_ids[0] ami_id = get_ami_id(ec2_client)", "= random_suffix_name(\"instance-ack-test\", 24) test_vpc = get_bootstrap_resources().SharedTestVPC subnet_id = test_vpc.public_subnets.subnet_ids[0] ami_id", "def get_instance_state(ec2_client, instance_id): instance_state = None try: instance = get_instance(ec2_client,", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either #", "Delete k8s resource _, deleted = k8s.delete_custom_resource(ref, 2, 5) assert", "Licensed under the Apache License, Version 2.0 (the \"License\"). You", "= test_vpc.public_subnets.subnet_ids[0] ami_id = get_ami_id(ec2_client) test_resource_values[\"INSTANCE_NAME\"] = resource_name test_resource_values[\"INSTANCE_AMI_ID\"] =", "Apache License, Version 2.0 (the \"License\"). You may # not", "\"\"\"Integration tests for Instance API. 
\"\"\" import datetime import pytest", "None assert k8s.get_resource_exists(ref) yield (ref, cr) # Delete the instance", "(ref, cr) # Delete the instance when tests complete try:", "k8s resource _, deleted = k8s.delete_custom_resource(ref, 2, 5) assert deleted", "5) assert deleted is True # Reservation still exists, but", "desired_state, timeout_sec): while True: now = datetime.datetime.now() timeout = now", "come up wait_for_instance_or_die(ec2_client, resource_id, 'running', TIMEOUT_SECONDS) # Validate instance tags", "logging.debug(e) finally: return instance def get_instance_state(ec2_client, instance_id): instance_state = None", "tests for Instance API. \"\"\" import datetime import pytest import", "def instance(ec2_client): test_resource_values = REPLACEMENT_VALUES.copy() resource_name = random_suffix_name(\"instance-ack-test\", 24) test_vpc", "= \"instances\" # highly available instance type for deterministic testing", "k8s.delete_custom_resource(ref, 3, 10) assert deleted except: pass @service_marker @pytest.mark.canary class", "for deterministic testing INSTANCE_TYPE = \"m4.large\" INSTANCE_AMI = \"Amazon Linux", "{desired_state} state\") time.sleep(DELETE_WAIT_AFTER_SECONDS) instance_state = get_instance_state(ec2_client, instance_id) if instance_state ==", "now + datetime.timedelta(seconds=timeout_sec) if datetime.datetime.now() >= timeout: pytest.fail(f\"Timed out waiting", "# on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "resource _, deleted = k8s.delete_custom_resource(ref, 2, 5) assert deleted is", "{\"Name\": \"state\", \"Values\": ['available']}, {\"Name\": \"virtualization-type\", \"Values\": ['hvm']}, ], )", "the shared subnet # for successful test cleanup wait_for_instance_or_die(ec2_client, resource_id,", "deleted = k8s.delete_custom_resource(ref, 3, 10) assert deleted except: pass @service_marker", "compliance with the License. 
A copy of the # License", "e2e.bootstrap_resources import get_bootstrap_resources RESOURCE_PLURAL = \"instances\" # highly available instance", "subnet # for successful test cleanup wait_for_instance_or_die(ec2_client, resource_id, 'terminated', TIMEOUT_SECONDS)", "except in compliance with the License. A copy of the", "test_create_delete(self, ec2_client, instance): (ref, cr) = instance resource_id = cr[\"status\"][\"instanceID\"]", "when tests complete try: _, deleted = k8s.delete_custom_resource(ref, 3, 10)", "= True assert tag_present # Delete k8s resource _, deleted", "is not None assert k8s.get_resource_exists(ref) yield (ref, cr) # Delete", "to come up wait_for_instance_or_die(ec2_client, resource_id, 'running', TIMEOUT_SECONDS) # Validate instance", "instance tags instance_tags = instance[\"Tags\"] tag_present = False for t", "e: logging.debug(e) finally: return instance def get_instance_state(ec2_client, instance_id): instance_state =", "CONDITIONS OF ANY KIND, either # express or implied. See", "in the \"license\" file accompanying this file. 
This file is", "ref = k8s.CustomResourceReference( CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL, resource_name, namespace=\"default\", ) k8s.create_custom_resource(ref,", "at # # http://aws.amazon.com/apache2.0/ # # or in the \"license\"", "the License for the specific language governing # permissions and", "\"\"\" import datetime import pytest import time import logging from", "t in instance_tags: if (t['Key'] == INSTANCE_TAG_KEY and t['Value'] ==", "= ec2_client.describe_images( Owners=['amazon'], Filters=[ {\"Name\": \"architecture\", \"Values\": ['x86_64']}, {\"Name\": \"state\",", "\"owner\" INSTANCE_TAG_VAL = \"ack-controller\" CREATE_WAIT_AFTER_SECONDS = 10 DELETE_WAIT_AFTER_SECONDS = 10", "instance is not None # Give time for instance to", "Exception as e: logging.debug(e) finally: return instance def get_instance_state(ec2_client, instance_id):", "instance): (ref, cr) = instance resource_id = cr[\"status\"][\"instanceID\"] time.sleep(CREATE_WAIT_AFTER_SECONDS) #", "= resp[\"Reservations\"][0][\"Instances\"][0] except Exception as e: logging.debug(e) finally: return instance", "test_resource_values[\"INSTANCE_TAG_VAL\"] = INSTANCE_TAG_VAL # Load Instance CR resource_data = load_ec2_resource(", "implied. See the License for the specific language governing #", "load_ec2_resource( \"instance\", additional_replacements=test_resource_values, ) logging.debug(resource_data) # Create k8s resource ref", "def wait_for_instance_or_die(ec2_client, instance_id, desired_state, timeout_sec): while True: now = datetime.datetime.now()", "= \"ack-controller\" CREATE_WAIT_AFTER_SECONDS = 10 DELETE_WAIT_AFTER_SECONDS = 10 TIMEOUT_SECONDS =", "not None # Give time for instance to come up", "distributed # on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "this file. This file is distributed # on an \"AS", "# express or implied. 
See the License for the specific", "to enter {desired_state} state\") time.sleep(DELETE_WAIT_AFTER_SECONDS) instance_state = get_instance_state(ec2_client, instance_id) if", "the License. \"\"\"Integration tests for Instance API. \"\"\" import datetime", "the specific language governing # permissions and limitations under the", "return instance def get_instance_state(ec2_client, instance_id): instance_state = None try: instance", "express or implied. See the License for the specific language", "instance[\"State\"][\"Name\"] except Exception as e: logging.debug(e) finally: return instance_state def", "= ec2_client.describe_instances( InstanceIds=[instance_id] ) instance = resp[\"Reservations\"][0][\"Instances\"][0] except Exception as", "ami_id = get_ami_id(ec2_client) test_resource_values[\"INSTANCE_NAME\"] = resource_name test_resource_values[\"INSTANCE_AMI_ID\"] = ami_id test_resource_values[\"INSTANCE_TYPE\"]", "= instance[\"State\"][\"Name\"] except Exception as e: logging.debug(e) finally: return instance_state", "to be 'terminated' in order to remove the dependency on", "resp = ec2_client.describe_instances( InstanceIds=[instance_id] ) instance = resp[\"Reservations\"][0][\"Instances\"][0] except Exception", "Instance to enter {desired_state} state\") time.sleep(DELETE_WAIT_AFTER_SECONDS) instance_state = get_instance_state(ec2_client, instance_id)", "# http://aws.amazon.com/apache2.0/ # # or in the \"license\" file accompanying", "INSTANCE_TAG_KEY test_resource_values[\"INSTANCE_TAG_VAL\"] = INSTANCE_TAG_VAL # Load Instance CR resource_data =", "# Validate instance tags instance_tags = instance[\"Tags\"] tag_present = False", "remove the dependency on the shared subnet # for successful", "for the specific language governing # permissions and limitations under", "INSTANCE_TYPE test_resource_values[\"INSTANCE_SUBNET_ID\"] = subnet_id test_resource_values[\"INSTANCE_TAG_KEY\"] = INSTANCE_TAG_KEY test_resource_values[\"INSTANCE_TAG_VAL\"] = INSTANCE_TAG_VAL", "= 
instance resource_id = cr[\"status\"][\"instanceID\"] time.sleep(CREATE_WAIT_AFTER_SECONDS) # Check Instance exists", "on the shared subnet # for successful test cleanup wait_for_instance_or_die(ec2_client,", "get_instance(ec2_client, instance_id: str) -> dict: instance = None try: resp", "\"Values\": ['available']}, {\"Name\": \"virtualization-type\", \"Values\": ['hvm']}, ], ) for image", "<filename>test/e2e/tests/test_instance.py # Copyright Amazon.com Inc. or its affiliates. All Rights", "import logging from acktest.resources import random_suffix_name from acktest.k8s import resource", "testing INSTANCE_TYPE = \"m4.large\" INSTANCE_AMI = \"Amazon Linux 2 Kernel\"", "['available']}, {\"Name\": \"virtualization-type\", \"Values\": ['hvm']}, ], ) for image in", "Create k8s resource ref = k8s.CustomResourceReference( CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL, resource_name,", "# or in the \"license\" file accompanying this file. This", "instance_id) instance_state = instance[\"State\"][\"Name\"] except Exception as e: logging.debug(e) finally:", "instance_state = get_instance_state(ec2_client, instance_id) if instance_state == desired_state: break def", "assert instance is not None # Give time for instance", "for Instance to enter {desired_state} state\") time.sleep(DELETE_WAIT_AFTER_SECONDS) instance_state = get_instance_state(ec2_client,", "still exists, but instance will commence termination # State needs", "but instance will commence termination # State needs to be", "dict: instance = None try: resp = ec2_client.describe_instances( InstanceIds=[instance_id] )", "\"license\" file accompanying this file. 
This file is distributed #", "instance_id) if instance_state == desired_state: break def get_ami_id(ec2_client): try: #", "may # not use this file except in compliance with", "load_ec2_resource from e2e.replacement_values import REPLACEMENT_VALUES from e2e.bootstrap_resources import get_bootstrap_resources RESOURCE_PLURAL", "timeout = now + datetime.timedelta(seconds=timeout_sec) if datetime.datetime.now() >= timeout: pytest.fail(f\"Timed", "test_vpc.public_subnets.subnet_ids[0] ami_id = get_ami_id(ec2_client) test_resource_values[\"INSTANCE_NAME\"] = resource_name test_resource_values[\"INSTANCE_AMI_ID\"] = ami_id", "def test_create_delete(self, ec2_client, instance): (ref, cr) = instance resource_id =", "random_suffix_name from acktest.k8s import resource as k8s from e2e import", "# Delete the instance when tests complete try: _, deleted", "-> dict: instance = None try: resp = ec2_client.describe_instances( InstanceIds=[instance_id]", "or in the \"license\" file accompanying this file. This file", "# Create k8s resource ref = k8s.CustomResourceReference( CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL,", "while True: now = datetime.datetime.now() timeout = now + datetime.timedelta(seconds=timeout_sec)", "\"instances\" # highly available instance type for deterministic testing INSTANCE_TYPE", "time.sleep(CREATE_WAIT_AFTER_SECONDS) # Check Instance exists instance = get_instance(ec2_client, resource_id) assert", "highly available instance type for deterministic testing INSTANCE_TYPE = \"m4.large\"", "pytest import time import logging from acktest.resources import random_suffix_name from", "image['Description']: return image['ImageId'] except Exception as e: logging.debug(e) @pytest.fixture def", "# permissions and limitations under the License. 
\"\"\"Integration tests for", "complete try: _, deleted = k8s.delete_custom_resource(ref, 3, 10) assert deleted", "additional_replacements=test_resource_values, ) logging.debug(resource_data) # Create k8s resource ref = k8s.CustomResourceReference(", "instance_id, desired_state, timeout_sec): while True: now = datetime.datetime.now() timeout =", "assert deleted except: pass @service_marker @pytest.mark.canary class TestInstance: def test_create_delete(self,", "KIND, either # express or implied. See the License for", "300 def get_instance(ec2_client, instance_id: str) -> dict: instance = None", "['hvm']}, ], ) for image in resp['Images']: if 'Description' in", "service_marker, CRD_GROUP, CRD_VERSION, load_ec2_resource from e2e.replacement_values import REPLACEMENT_VALUES from e2e.bootstrap_resources", "(ref, cr) = instance resource_id = cr[\"status\"][\"instanceID\"] time.sleep(CREATE_WAIT_AFTER_SECONDS) # Check", "Amazon.com Inc. or its affiliates. All Rights Reserved. # #", "CRD_VERSION, load_ec2_resource from e2e.replacement_values import REPLACEMENT_VALUES from e2e.bootstrap_resources import get_bootstrap_resources", "assert deleted is True # Reservation still exists, but instance", "in order to remove the dependency on the shared subnet", "Load Instance CR resource_data = load_ec2_resource( \"instance\", additional_replacements=test_resource_values, ) logging.debug(resource_data)", "finally: return instance_state def wait_for_instance_or_die(ec2_client, instance_id, desired_state, timeout_sec): while True:", "CRD_GROUP, CRD_VERSION, load_ec2_resource from e2e.replacement_values import REPLACEMENT_VALUES from e2e.bootstrap_resources import", "None try: resp = ec2_client.describe_instances( InstanceIds=[instance_id] ) instance = resp[\"Reservations\"][0][\"Instances\"][0]", "Delete the instance when tests complete try: _, deleted =", "file is distributed # on an \"AS IS\" BASIS, WITHOUT", "image['ImageId'] except Exception as e: logging.debug(e) @pytest.fixture 
def instance(ec2_client): test_resource_values", "_, deleted = k8s.delete_custom_resource(ref, 2, 5) assert deleted is True", "# State needs to be 'terminated' in order to remove", "its affiliates. All Rights Reserved. # # Licensed under the", "resource_id = cr[\"status\"][\"instanceID\"] time.sleep(CREATE_WAIT_AFTER_SECONDS) # Check Instance exists instance =", "try: # Use latest AL2 resp = ec2_client.describe_images( Owners=['amazon'], Filters=[", "cr[\"status\"][\"instanceID\"] time.sleep(CREATE_WAIT_AFTER_SECONDS) # Check Instance exists instance = get_instance(ec2_client, resource_id)", "waiting for Instance to enter {desired_state} state\") time.sleep(DELETE_WAIT_AFTER_SECONDS) instance_state =", "is True # Reservation still exists, but instance will commence", "= get_instance(ec2_client, resource_id) assert instance is not None # Give", "available instance type for deterministic testing INSTANCE_TYPE = \"m4.large\" INSTANCE_AMI", "acktest.k8s import resource as k8s from e2e import service_marker, CRD_GROUP,", "assert tag_present # Delete k8s resource _, deleted = k8s.delete_custom_resource(ref,", "= get_instance_state(ec2_client, instance_id) if instance_state == desired_state: break def get_ami_id(ec2_client):", "= now + datetime.timedelta(seconds=timeout_sec) if datetime.datetime.now() >= timeout: pytest.fail(f\"Timed out", "file accompanying this file. This file is distributed # on", "3, 10) assert deleted except: pass @service_marker @pytest.mark.canary class TestInstance:", "class TestInstance: def test_create_delete(self, ec2_client, instance): (ref, cr) = instance", "License, Version 2.0 (the \"License\"). 
You may # not use", "is distributed # on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "['x86_64']}, {\"Name\": \"state\", \"Values\": ['available']}, {\"Name\": \"virtualization-type\", \"Values\": ['hvm']}, ],", "= 10 DELETE_WAIT_AFTER_SECONDS = 10 TIMEOUT_SECONDS = 300 def get_instance(ec2_client,", "for image in resp['Images']: if 'Description' in image: if INSTANCE_AMI", "datetime.timedelta(seconds=timeout_sec) if datetime.datetime.now() >= timeout: pytest.fail(f\"Timed out waiting for Instance", "get_instance_state(ec2_client, instance_id) if instance_state == desired_state: break def get_ami_id(ec2_client): try:", "located at # # http://aws.amazon.com/apache2.0/ # # or in the", "ec2_client, instance): (ref, cr) = instance resource_id = cr[\"status\"][\"instanceID\"] time.sleep(CREATE_WAIT_AFTER_SECONDS)", "instance_state = None try: instance = get_instance(ec2_client, instance_id) instance_state =", "import random_suffix_name from acktest.k8s import resource as k8s from e2e", "e2e import service_marker, CRD_GROUP, CRD_VERSION, load_ec2_resource from e2e.replacement_values import REPLACEMENT_VALUES", "is not None # Give time for instance to come", "affiliates. All Rights Reserved. # # Licensed under the Apache", "License. \"\"\"Integration tests for Instance API. 
\"\"\" import datetime import", "\"Values\": ['x86_64']}, {\"Name\": \"state\", \"Values\": ['available']}, {\"Name\": \"virtualization-type\", \"Values\": ['hvm']},", "\"m4.large\" INSTANCE_AMI = \"Amazon Linux 2 Kernel\" INSTANCE_TAG_KEY = \"owner\"", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "RESOURCE_PLURAL = \"instances\" # highly available instance type for deterministic", "import time import logging from acktest.resources import random_suffix_name from acktest.k8s", "get_bootstrap_resources RESOURCE_PLURAL = \"instances\" # highly available instance type for", "return image['ImageId'] except Exception as e: logging.debug(e) @pytest.fixture def instance(ec2_client):", "except Exception as e: logging.debug(e) finally: return instance_state def wait_for_instance_or_die(ec2_client,", "deleted except: pass @service_marker @pytest.mark.canary class TestInstance: def test_create_delete(self, ec2_client,", "get_ami_id(ec2_client): try: # Use latest AL2 resp = ec2_client.describe_images( Owners=['amazon'],", "Owners=['amazon'], Filters=[ {\"Name\": \"architecture\", \"Values\": ['x86_64']}, {\"Name\": \"state\", \"Values\": ['available']},", "\"Values\": ['hvm']}, ], ) for image in resp['Images']: if 'Description'", "{\"Name\": \"architecture\", \"Values\": ['x86_64']}, {\"Name\": \"state\", \"Values\": ['available']}, {\"Name\": \"virtualization-type\",", "True assert tag_present # Delete k8s resource _, deleted =", "INSTANCE_TAG_VAL): tag_present = True assert tag_present # Delete k8s resource" ]
[ "('Moda', 'Moda'), ('Deporte y ocio', 'Deporte y ocio'), ('Videojuegos', 'Videojuegos'),", "validators.EqualTo('password', message='Las contraseñas no coinciden') ]) submit = SubmitField('Cambiar contraseña')", "nombre del producto son 50 carácteres')]) price = DecimalField('Precio (€)',", "25 carácteres')]) email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir un", "lastname = StringField('Apellidos', [ validators.DataRequired(message='Es necesario introducir apellidos'), validators.Length(min=4, max=50,", "descendente') ]) distancia = StringField('Distancia') submit = SubmitField('Buscar') class Review(FlaskForm):", "= StringField('Username', [ # validators.Length(min=4, max=25, message='El nombre de usuario", "'90') ]) ordenacion = SelectField('Ordenación de Resultados', choices = [", "Structure of the Login form class LoginForm(Form): email = StringField('Email',", "a 999.999,99 €)')]) category = SelectField('Categoría', choices = [ ('Automoción',", "StringField('Username', [ # validators.Length(min=4, max=25, message='El nombre de usuario debe", "'Automoción'), ('Informática', 'Informática'), ('Moda', 'Moda'), ('Deporte y ocio', 'Deporte y", "= StringField('Palabras Clave') minprice = StringField('Precio Mínimo') maxprice = StringField('Precio", "event will occur') resultados = SelectField('Resultados Por Página', choices =", "ordenacion = SelectField('Ordenación de Resultados', choices = [ ('published ASC',", "puntuación entre 1 y 5'), validators.NumberRange(min=1, max=5, message='La puntuación debe", "'Time that the event will occur') maxpublished = DateField('Start', format", "email no puede contener más de 50 carácteres')]) password =", "DESC', 'Precio Descendente'), ('views DESC', 'Popularidad descendente')] status = SelectField('Estado',", "= SelectField('Ordenación de Resultados', choices = [ ('published ASC', 'Fecha", "pictures = HiddenField(\"Imágenes\") # mimes = HiddenField(\"Formatos de imagen\") name", "import 
DataRequired # Structure of the Login form class LoginForm(Form):", "submit = SubmitField('Guardar cambios') class EditLocation(FlaskForm): lat = HiddenField('Latitud', [", "necesario introducir apellidos'), validators.Length(min=4, max=50, message='El tamaño máximo del nombre", "'Foto y audio'] category = SelectField('Categoría', choices = [ ('Automoción',", "category = SelectField('Categoría', choices = [ ('Automoción', 'Automoción'), ('Informática', 'Informática'),", "Anterior', [ validators.DataRequired(message='Es necesario introducir una contraseña') ]) password =", "'Libros y música', 'Hogar y jardín', 'Foto y audio'] category", "'75', '90'] ordenacionlist = [('published ASC', 'Fecha (Más viejos primero)'),", "coinciden') ]) # Structure of the Login form class RestorePasswordForm(Form):", "[ validators.EqualTo('email', message='Los correos no coinciden') ]) submit = SubmitField('Cambiar", "la contraseña', [ validators.EqualTo('password', message='Las contraseñas no coinciden') ]) submit", "= HiddenField(\"Imágenes\") # mimes = HiddenField(\"Formatos de imagen\") name =", "SubmitField('Eliminar imagen') class DeleteAccount(FlaskForm): delete = SubmitField(\"Eliminar cuenta\") # Structure", "message='El tamaño máximo del nombre del producto son 50 carácteres')])", "ha podido obtener la nueva localización') ]) lng = HiddenField('Longitud',", "8 caracteres') ]) confirm = PasswordField('Confirme la contraseña', [ validators.EqualTo('password',", "intoducido no es válido (de 0 € a 999.999,99 €)')])", "('Mal comportamiento', 'Mal comportamiento'), ('Artículo defectuoso', 'Artículo defectuoso'), ('Otros', 'Otros')])", "introducir un email'), validators.Length(min=1, max=50, message='El email no puede contener", "jardín', 'Foto y audio'] category = SelectField('Categoría', choices = [", "validators.EqualTo('email', message='Los correos no coinciden') ]) submit = SubmitField('Cambiar correo')", "'Precio Ascendente'), ('price DESC', 'Precio Descendente'), ('views 
DESC', 'Popularidad descendente')", "máximo del nombre del producto son 50 carácteres')]) price =", "necesario introducir un nombre de producto'), validators.Length(min=1, max=50, message='El tamaño", "puntuación debe ser de 1 a 5 estrellas')]) comment =", "contraseña') class EditEmail(FlaskForm): email = StringField('Correo electrónico', [ validators.DataRequired(message='Es necesario", "('distance ASC', 'Distancia Ascendente'), ('price ASC', 'Precio Ascendente'), ('price DESC',", "de producto'), validators.Length(min=1, max=50, message='El tamaño máximo del nombre del", "StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email')]) password = PasswordField('<PASSWORD>',", "debe tener entre 4 y 25 carácteres')]) email = StringField('Email',", "se ha podido obtener la nueva localización') ]) lng =", "max=50, message='El tamaño máximo del nombre son 50 carácteres')]) lastname", "producto', [ validators.DataRequired(message='Es necesario introducir un nombre de producto'), validators.Length(min=1,", "(€)', [ validators.DataRequired(message='Es necesario introducir un precio'), validators.NumberRange(min=0, max=1000000, message='El", "message='El nombre de usuario debe tener entre 4 y 25", "al menos 8 caracteres') ]) confirm = PasswordField('Confirmar Contraseña', [", "event will occur', validators= [validators.Optional()] ) submit = SubmitField('Publicar') class", "Ascendente'), ('price DESC', 'Precio Descendente'), ('views DESC', 'Popularidad descendente')] status", "validators.DataRequired(message='Es necesario introducir una puntuación entre 1 y 5'), validators.NumberRange(min=1,", "correo') class EditPicture(FlaskForm): picture = FileField('Imagen de perfil') submit =", "('Informática', 'Informática'), ('Moda', 'Moda'), ('Deporte y ocio', 'Deporte y ocio'),", "comment = TextAreaField('Comentario', [ validators.DataRequired(message='Es necesario escribir un comentario')]) submit", "a la cita', 'No acudió a la cita'), ('Mal 
comportamiento',", "password = PasswordField('<PASSWORD>aseña', [ validators.DataRequired(message='Es necesario introducir una contraseña'), validators.Length(min=8,", "class EditEmail(FlaskForm): email = StringField('Correo electrónico', [ validators.DataRequired(message='Es necesario introducir", "'Moda', 'Deporte y ocio', 'Videojuegos', 'Libros y música', 'Hogar y", "música', 'Hogar y jardín', 'Foto y audio'] category = SelectField('Categoría',", "'30'), ('45', '45'), ('60', '60'), ('75', '75'), ('90', '90') ])", "carácteres')]) confirm = StringField('Confirmar correo electrónico', [ validators.EqualTo('email', message='Los correos", "from wtforms import Form, StringField, PasswordField, BooleanField, SubmitField, IntegerField, validators,", "SelectField('Ordenación de Resultados', choices = [ ('published ASC', 'Fecha (Más", "lng = HiddenField('Longitud') enddate = DateField('End', format = '%Y-%m-%d', description", "coinciden') ]) submit = SubmitField('Cambiar correo') class EditPicture(FlaskForm): picture =", "de Resultados', choices = [ ('published ASC', 'Fecha (Más viejos", "Structure of the Subir Anuncio form class SubirAnuncioForm(FlaskForm): # pictures", "y audio', 'Foto y audio') ], validators = [ validators.DataRequired(message='Es", "validators.NumberRange(min=0, max=1000000, message='El precio intoducido no es válido (de 0", "y audio', 'Foto y audio') ]) estados = [('en venta',", "necesario introducir un email'), validators.Length(min=1, max=50, message='El email no puede", "'Popularidad descendente')] status = SelectField('Estado', choices = [ ('en venta','En", "'60'), ('75', '75'), ('90', '90') ]) ordenacion = SelectField('Ordenación de", "del nombre son 50 carácteres')]) # username = StringField('Username', [", "remember_me = BooleanField('Recuerdame') submit = SubmitField('Iniciar Sesión') # Structure of", "TextAreaField('Descripción del informe', [ validators.DataRequired(message='Es necesario escribir una descripción')]) submit", "necesario 
introducir un email')]) submit = SubmitField(\"Correo de Recuperación\") class", "('Hogar y jardín', 'Hogar y jardín'), ('Foto y audio', 'Foto", "y jardín', 'Foto y audio'] category = SelectField('Categoría', choices =", "validators.DataRequired(message='Es necesario una contraseña'), validators.Length(min=8, message='La contraseña debe tener al", "producto'), validators.Length(min=1, max=50, message='El tamaño máximo del nombre del producto", "the Subir Anuncio form class SubirAnuncioForm(FlaskForm): # pictures = HiddenField(\"Imágenes\")", "that the event will occur') maxpublished = DateField('Start', format =", "# Structure of the Register form class RegisterForm(Form): name =", "SubirAnuncioForm(FlaskForm): # pictures = HiddenField(\"Imágenes\") # mimes = HiddenField(\"Formatos de", "SubmitField('Establecer ubicación') class EditPassword(FlaskForm): old = PasswordField('Contraseña Anterior', [ validators.DataRequired(message='Es", "carácteres')]) lastname = StringField('Apellidos', [ validators.DataRequired(message='Es necesario introducir apellidos'), validators.Length(min=4,", "un precio'), validators.NumberRange(min=0, max=1000000, message='El precio intoducido no es válido", "carácteres')]) password = PasswordField('Contraseña', [ validators.DataRequired(message='Es necesario una contraseña'), validators.Length(min=8,", "DESC', 'Popularidad descendente') ]) distancia = StringField('Distancia') submit = SubmitField('Buscar')", "StringField('Cantidad') submit = SubmitField('Realizar Puja') class reportForm(Form): category = SelectField('Categoría',", "= SubmitField('Realizar Puja') class reportForm(Form): category = SelectField('Categoría', choices =", "validators.DataRequired(message='Es necesario seleccionar una categoría') ]) description = TextAreaField('Descripción', [", "y audio'] category = SelectField('Categoría', choices = [ ('Automoción', 'Automoción'),", "submit = SubmitField('Iniciar Sesión') # Structure of the Register form", 
"bidPlacementForm(FlaskForm): amount = StringField('Cantidad') submit = SubmitField('Realizar Puja') class reportForm(Form):", "# pictures = HiddenField(\"Imágenes\") # mimes = HiddenField(\"Formatos de imagen\")", "obtener la nueva localización') ]) submit = SubmitField('Establecer ubicación') class", "class ProductSearch(Form): categories = ['Automoción', 'Informática', 'Moda', 'Deporte y ocio',", "50 carácteres')]) gender = RadioField('Género', choices = [('hombre','Hombre'),('mujer','Mujer')]) submit =", "will occur', validators= [validators.Optional()] ) submit = SubmitField('Publicar') class ProductSearch(Form):", "'Deporte y ocio', 'Videojuegos', 'Libros y música', 'Hogar y jardín',", "música', 'Libros y música'), ('Hogar y jardín', 'Hogar y jardín'),", "imagen\") name = StringField('Nombre del producto', [ validators.DataRequired(message='Es necesario introducir", "validators.DataRequired(message='Es necesario escribir una descripción')]) lat = HiddenField('Latitud') lng =", "descendente')] status = SelectField('Estado', choices = [ ('en venta','En Venta'),", "the Login form class LoginForm(Form): email = StringField('Email', [ validators.DataRequired(message='Es", "FileField('Imagen de perfil') submit = SubmitField('Establecer imagen') delete = SubmitField('Eliminar", "SubmitField('Cambiar correo') class EditPicture(FlaskForm): picture = FileField('Imagen de perfil') submit", "validators.DataRequired(message='Es necesario introducir un nombre'), validators.Length(min=4, max=50, message='El tamaño máximo", "PasswordField('Contraseña Anterior', [ validators.DataRequired(message='Es necesario introducir una contraseña') ]) password", "coinciden') ]) submit = SubmitField('Cambiar contraseña') class EditEmail(FlaskForm): email =", "y ocio'), ('Videojuegos', 'Videojuegos'), ('Libros y música', 'Libros y música'),", "= 'Time that the event will occur') resultados = SelectField('Resultados", "IntegerField, validators, FileField, \\ MultipleFileField, SelectField, 
RadioField, HiddenField, DecimalField, TextAreaField", "categories = ['Automoción', 'Informática', 'Moda', 'Deporte y ocio', 'Videojuegos', 'Libros", "maxprice = StringField('Precio Máximo') minpublished = DateField('Start', format = '%Y-%m-%d',", "minprice = StringField('Precio Mínimo') maxprice = StringField('Precio Máximo') minpublished =", "Clave') minprice = StringField('Precio Mínimo') maxprice = StringField('Precio Máximo') minpublished", "necesario introducir una contraseña') ]) password = PasswordField('<PASSWORD>aseña', [ validators.DataRequired(message='Es", "= StringField('Distancia') submit = SubmitField('Buscar') class Review(FlaskForm): stars = IntegerField('Puntuación',", "tener entre 4 y 25 carácteres')]) email = StringField('Email', [", "'75'), ('90', '90') ]) ordenacion = SelectField('Ordenación de Resultados', choices", "validators.DataRequired(message='No se ha podido obtener la nueva localización') ]) submit", "HiddenField, DecimalField, TextAreaField from wtforms.fields.html5 import DateField from wtforms.validators import", "1 a 5 estrellas')]) comment = TextAreaField('Comentario', [ validators.DataRequired(message='Es necesario", "un email')]) submit = SubmitField(\"Correo de Recuperación\") class EditProfile(FlaskForm): name", "carácteres')]) email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email'),", "'Otros')]) description = TextAreaField('Descripción del informe', [ validators.DataRequired(message='Es necesario escribir", "no coinciden') ]) # Structure of the Login form class", "carácteres')]) gender = RadioField('Género', choices = [('hombre','Hombre'),('mujer','Mujer')]) submit = SubmitField('Guardar", "IntegerField('Puntuación', [ validators.DataRequired(message='Es necesario introducir una puntuación entre 1 y", "[ validators.DataRequired(message='Es necesario introducir un nombre'), validators.Length(min=4, max=50, message='El tamaño", "= [('en venta', 'En Venta'), ('vendido', 'Vendido')] 
resultadosporpag = ['15',", "DESC', 'Precio Descendente'), ('views DESC', 'Popularidad descendente') ]) distancia =", "RestorePasswordForm(Form): email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email')])", "# Structure of the Subir Anuncio form class SubirAnuncioForm(FlaskForm): #", "description = TextAreaField('Descripción', [ validators.DataRequired(message='Es necesario escribir una descripción')]) lat", "'Precio Descendente'), ('views DESC', 'Popularidad descendente')] status = SelectField('Estado', choices", "'Distancia Ascendente'), ('price ASC', 'Precio Ascendente'), ('price DESC', 'Precio Descendente'),", "de 50 carácteres')]) confirm = StringField('Confirmar correo electrónico', [ validators.EqualTo('email',", "precio intoducido no es válido (de 0 € a 999.999,99", "debe ser de 1 a 5 estrellas')]) comment = TextAreaField('Comentario',", "]) password = PasswordField('<PASSWORD>aseña', [ validators.DataRequired(message='Es necesario introducir una contraseña'),", "50 carácteres')]) confirm = StringField('Confirmar correo electrónico', [ validators.EqualTo('email', message='Los", "de perfil') submit = SubmitField('Establecer imagen') delete = SubmitField('Eliminar imagen')", "necesario introducir un nombre'), validators.Length(min=4, max=50, message='El tamaño máximo del", "SubmitField('Guardar cambios') class EditLocation(FlaskForm): lat = HiddenField('Latitud', [ validators.DataRequired(message='No se", "no puede contener más de 50 carácteres')]) confirm = StringField('Confirmar", "necesario escribir una descripción')]) lat = HiddenField('Latitud') lng = HiddenField('Longitud')", "minpublished = DateField('Start', format = '%Y-%m-%d', description = 'Time that", "submit = SubmitField('Realizar Puja') class reportForm(Form): category = SelectField('Categoría', choices", "contraseña') ]) password = PasswordField('<PASSWORD>aseña', [ validators.DataRequired(message='Es necesario introducir una", "= StringField('Nombre 
del producto', [ validators.DataRequired(message='Es necesario introducir un nombre", "(de 0 € a 999.999,99 €)')]) category = SelectField('Categoría', choices", "validators.DataRequired(message='Es necesario introducir una dirección de correo'), validators.Length(min=1, max=50, message='El", "'Moda'), ('Deporte y ocio', 'Deporte y ocio'), ('Videojuegos', 'Videojuegos'), ('Libros", "podido obtener la nueva localización') ]) lng = HiddenField('Longitud', [", "es válido (de 0 € a 999.999,99 €)')]) category =", "del producto son 50 carácteres')]) price = DecimalField('Precio (€)', [", "un email'), validators.Length(min=1, max=50, message='El email no puede contener más", "introducir un nombre'), validators.Length(min=4, max=50, message='El tamaño máximo del nombre", "'Vendido')] resultadosporpag = ['15', '30', '45', '60', '75', '90'] ordenacionlist", "enddate = DateField('End', format = '%Y-%m-%d', description = 'Time that", "ASC', 'Fecha (Más viejos primero)'), ('published DESC', 'Fecha (Más nuevos", "caracteres') ]) confirm = PasswordField('Confirmar Contraseña', [ validators.EqualTo('password', message='Las contraseñas", "ocio', 'Videojuegos', 'Libros y música', 'Hogar y jardín', 'Foto y", "= [ ('Sospecha de fraude', 'Sospecha de fraude'), ('No acudió", "Mínimo') maxprice = StringField('Precio Máximo') minpublished = DateField('Start', format =", "audio', 'Foto y audio') ], validators = [ validators.DataRequired(message='Es necesario", "message='El tamaño máximo del nombre son 50 carácteres')]) # username", "('Foto y audio', 'Foto y audio') ], validators = [", "introducir un precio'), validators.NumberRange(min=0, max=1000000, message='El precio intoducido no es", "producto son 50 carácteres')]) price = DecimalField('Precio (€)', [ validators.DataRequired(message='Es", "descripción')]) lat = HiddenField('Latitud') lng = HiddenField('Longitud') enddate = DateField('End',", "'Hogar y jardín', 'Foto y audio'] category = SelectField('Categoría', choices", "ocio', 
'Deporte y ocio'), ('Videojuegos', 'Videojuegos'), ('Libros y música', 'Libros", "validators.Length(min=1, max=50, message='El tamaño máximo del nombre del producto son", "del nombre son 50 carácteres')]) gender = RadioField('Género', choices =", "'Foto y audio') ], validators = [ validators.DataRequired(message='Es necesario seleccionar", "StringField('Distancia') submit = SubmitField('Buscar') class Review(FlaskForm): stars = IntegerField('Puntuación', [", "Descendente'), ('views DESC', 'Popularidad descendente')] status = SelectField('Estado', choices =", "perfil') submit = SubmitField('Establecer imagen') delete = SubmitField('Eliminar imagen') class", "message='La puntuación debe ser de 1 a 5 estrellas')]) comment", "= SubmitField('Eliminar imagen') class DeleteAccount(FlaskForm): delete = SubmitField(\"Eliminar cuenta\") #", "5 estrellas')]) comment = TextAreaField('Comentario', [ validators.DataRequired(message='Es necesario escribir un", "localización') ]) submit = SubmitField('Establecer ubicación') class EditPassword(FlaskForm): old =", "[ validators.DataRequired(message='Es necesario introducir una contraseña'), validators.Length(min=8, message='La contraseña debe", "max=50, message='El email no puede contener más de 50 carácteres')])", "= RadioField('Género', choices = [('hombre','Hombre'),('mujer','Mujer')]) submit = SubmitField('Guardar cambios') class", "HiddenField('Latitud') lng = HiddenField('Longitud') enddate = DateField('End', format = '%Y-%m-%d',", "[ validators.DataRequired(message='Es necesario introducir un email'), validators.Length(min=1, max=50, message='El email", "= StringField('Apellidos', [ validators.DataRequired(message='Es necesario introducir apellidos'), validators.Length(min=4, max=50, message='El", "'45'), ('60', '60'), ('75', '75'), ('90', '90') ]) ordenacion =", "# mimes = HiddenField(\"Formatos de imagen\") name = StringField('Nombre del", "entre 1 y 5'), validators.NumberRange(min=1, max=5, message='La puntuación debe 
ser", "TextAreaField from wtforms.fields.html5 import DateField from wtforms.validators import DataRequired #", "dirección de correo'), validators.Length(min=1, max=50, message='El correo no puede contener", "validators.DataRequired(message='Es necesario introducir un precio'), validators.NumberRange(min=0, max=1000000, message='El precio intoducido", "= FileField('Imagen de perfil') submit = SubmitField('Establecer imagen') delete =", "DateField('End', format = '%Y-%m-%d', description = 'Time that the event", "max=25, message='El nombre de usuario debe tener entre 4 y", "StringField('Nombre del producto', [ validators.DataRequired(message='Es necesario introducir un nombre de", "la cita', 'No acudió a la cita'), ('Mal comportamiento', 'Mal", "nombre de usuario debe tener entre 4 y 25 carácteres')])", "informe', [ validators.DataRequired(message='Es necesario escribir una descripción')]) submit = SubmitField('Publicar", "class RegisterForm(Form): name = StringField('Nombre', [ validators.DataRequired(message='Es necesario introducir un", "from flask_wtf import FlaskForm from wtforms import Form, StringField, PasswordField,", "name = StringField('Nombre', [ validators.DataRequired(message='Es necesario introducir un nombre'), validators.Length(min=4,", "FlaskForm from wtforms import Form, StringField, PasswordField, BooleanField, SubmitField, IntegerField,", "'Fecha (Más nuevos primero)'), ('distance DESC', 'Distancia Descendente'), ('distance ASC',", "de fraude'), ('No acudió a la cita', 'No acudió a", "= PasswordField('Contraseña Anterior', [ validators.DataRequired(message='Es necesario introducir una contraseña') ])", "submit = SubmitField('Buscar') class Review(FlaskForm): stars = IntegerField('Puntuación', [ validators.DataRequired(message='Es", "necesario introducir un email')]) password = PasswordField('<PASSWORD>', [ validators.DataRequired(message='Es necesario", "€)')]) category = SelectField('Categoría', choices = [ ('Automoción', 'Automoción'), 
('Informática',", "confirm = PasswordField('Confirmar Contraseña', [ validators.EqualTo('password', message='Las contraseñas no coinciden')", "Subir Anuncio form class SubirAnuncioForm(FlaskForm): # pictures = HiddenField(\"Imágenes\") #", "necesario seleccionar una categoría') ]) description = TextAreaField('Descripción', [ validators.DataRequired(message='Es", "= BooleanField('Recuerdame') submit = SubmitField('Iniciar Sesión') # Structure of the", "HiddenField(\"Imágenes\") # mimes = HiddenField(\"Formatos de imagen\") name = StringField('Nombre", "primero)'), ('distance DESC', 'Distancia Descendente'), ('distance ASC', 'Distancia Ascendente'), ('price", "('price ASC', 'Precio Ascendente'), ('price DESC', 'Precio Descendente'), ('views DESC',", "= SubmitField(\"Eliminar cuenta\") # Structure of the Subir Anuncio form", "del informe', [ validators.DataRequired(message='Es necesario escribir una descripción')]) submit =", "tamaño máximo del nombre son 50 carácteres')]) # username =", "escribir una descripción')]) lat = HiddenField('Latitud') lng = HiddenField('Longitud') enddate", "# username = StringField('Username', [ # validators.Length(min=4, max=25, message='El nombre", "Structure of the Register form class RegisterForm(Form): name = StringField('Nombre',", "se ha podido obtener la nueva localización') ]) submit =", "= DateField('End', format = '%Y-%m-%d', description = 'Time that the", "= SelectField('Categoría', choices = [ ('Sospecha de fraude', 'Sospecha de", "= StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email')]) password =", "description = 'Time that the event will occur') maxpublished =", "máximo del nombre son 50 carácteres')]) # username = StringField('Username',", "'Hogar y jardín'), ('Foto y audio', 'Foto y audio') ],", "'30', '45', '60', '75', '90'] ordenacionlist = [('published ASC', 'Fecha", "message='La contraseña debe tener al menos 8 caracteres') ]) confirm", "import Form, StringField, PasswordField, 
BooleanField, SubmitField, IntegerField, validators, FileField, \\", "contraseñas no coinciden') ]) submit = SubmitField('Cambiar contraseña') class EditEmail(FlaskForm):", "[ ('Sospecha de fraude', 'Sospecha de fraude'), ('No acudió a", "flask_wtf import FlaskForm from wtforms import Form, StringField, PasswordField, BooleanField,", "wtforms.fields.html5 import DateField from wtforms.validators import DataRequired # Structure of", "submit = SubmitField('Publicar') class ProductSearch(Form): categories = ['Automoción', 'Informática', 'Moda',", "= StringField('Cantidad') submit = SubmitField('Realizar Puja') class reportForm(Form): category =", "SelectField('Categoría', choices = [ ('Automoción', 'Automoción'), ('Informática', 'Informática'), ('Moda', 'Moda'),", "un nombre'), validators.Length(min=4, max=50, message='El tamaño máximo del nombre son", "description = 'Time that the event will occur') resultados =", "audio'] category = SelectField('Categoría', choices = [ ('Automoción', 'Automoción'), ('Informática',", "[ # validators.Length(min=4, max=25, message='El nombre de usuario debe tener", "y 5'), validators.NumberRange(min=1, max=5, message='La puntuación debe ser de 1", "= SelectField('Estado', choices = [ ('en venta','En Venta'), ('vendido','Vendido') ])", "de fraude', 'Sospecha de fraude'), ('No acudió a la cita',", "# validators.Length(min=4, max=25, message='El nombre de usuario debe tener entre", "imagen') delete = SubmitField('Eliminar imagen') class DeleteAccount(FlaskForm): delete = SubmitField(\"Eliminar", "Venta'), ('vendido','Vendido') ]) keywords = StringField('Palabras Clave') minprice = StringField('Precio", "y música'), ('Hogar y jardín', 'Hogar y jardín'), ('Foto y", "PasswordField, BooleanField, SubmitField, IntegerField, validators, FileField, \\ MultipleFileField, SelectField, RadioField,", "FileField, \\ MultipleFileField, SelectField, RadioField, HiddenField, DecimalField, TextAreaField from wtforms.fields.html5", 
"EditPicture(FlaskForm): picture = FileField('Imagen de perfil') submit = SubmitField('Establecer imagen')", "# Structure of the Login form class LoginForm(Form): email =", "validators.DataRequired(message='Es necesario introducir un email')]) submit = SubmitField(\"Correo de Recuperación\")", "password = PasswordField('<PASSWORD>', [ validators.DataRequired(message='Es necesario introducir una contraseña')]) remember_me", "validators.DataRequired(message='Es necesario introducir un email')]) password = PasswordField('<PASSWORD>', [ validators.DataRequired(message='Es", "carácteres')]) price = DecimalField('Precio (€)', [ validators.DataRequired(message='Es necesario introducir un", "that the event will occur', validators= [validators.Optional()] ) submit =", "son 50 carácteres')]) # username = StringField('Username', [ # validators.Length(min=4,", "correo no puede contener más de 50 carácteres')]) confirm =", "delete = SubmitField(\"Eliminar cuenta\") # Structure of the Subir Anuncio", "will occur') resultados = SelectField('Resultados Por Página', choices = [", "[ validators.DataRequired(message='Es necesario una contraseña'), validators.Length(min=8, message='La contraseña debe tener", "('en venta','En Venta'), ('vendido','Vendido') ]) keywords = StringField('Palabras Clave') minprice", "that the event will occur') resultados = SelectField('Resultados Por Página',", "50 carácteres')]) # username = StringField('Username', [ # validators.Length(min=4, max=25,", "seleccionar una categoría') ]) description = TextAreaField('Descripción', [ validators.DataRequired(message='Es necesario", "contener más de 50 carácteres')]) confirm = StringField('Confirmar correo electrónico',", "jardín'), ('Foto y audio', 'Foto y audio') ]) estados =", "message='Las contraseñas no coinciden') ]) # Structure of the Login", "], validators = [ validators.DataRequired(message='Es necesario seleccionar una categoría') ])", "DESC', 'Distancia Descendente'), ('distance ASC', 'Distancia 
Ascendente'), ('price ASC', 'Precio", "y ocio', 'Deporte y ocio'), ('Videojuegos', 'Videojuegos'), ('Libros y música',", "('30', '30'), ('45', '45'), ('60', '60'), ('75', '75'), ('90', '90')", "[ ('en venta','En Venta'), ('vendido','Vendido') ]) keywords = StringField('Palabras Clave')", "Form, StringField, PasswordField, BooleanField, SubmitField, IntegerField, validators, FileField, \\ MultipleFileField,", "email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email')]) password", "class DeleteAccount(FlaskForm): delete = SubmitField(\"Eliminar cuenta\") # Structure of the", "necesario introducir una dirección de correo'), validators.Length(min=1, max=50, message='El correo", "defectuoso', 'Artículo defectuoso'), ('Otros', 'Otros')]) description = TextAreaField('Descripción del informe',", "contraseñas no coinciden') ]) # Structure of the Login form", "PasswordField('Confirme la contraseña', [ validators.EqualTo('password', message='Las contraseñas no coinciden') ])", "submit = SubmitField(\"Correo de Recuperación\") class EditProfile(FlaskForm): name = StringField('Nombre',", "precio'), validators.NumberRange(min=0, max=1000000, message='El precio intoducido no es válido (de", "'Time that the event will occur', validators= [validators.Optional()] ) submit", "MultipleFileField, SelectField, RadioField, HiddenField, DecimalField, TextAreaField from wtforms.fields.html5 import DateField", "del nombre son 50 carácteres')]) lastname = StringField('Apellidos', [ validators.DataRequired(message='Es", "StringField('Correo electrónico', [ validators.DataRequired(message='Es necesario introducir una dirección de correo'),", "del producto', [ validators.DataRequired(message='Es necesario introducir un nombre de producto'),", "contraseña', [ validators.EqualTo('password', message='Las contraseñas no coinciden') ]) submit =", "validators= [validators.Optional()] ) submit = SubmitField('Publicar') class ProductSearch(Form): categories =", 
"SubmitField('Cambiar contraseña') class EditEmail(FlaskForm): email = StringField('Correo electrónico', [ validators.DataRequired(message='Es", "a la cita'), ('Mal comportamiento', 'Mal comportamiento'), ('Artículo defectuoso', 'Artículo", "y audio') ]) estados = [('en venta', 'En Venta'), ('vendido',", "comportamiento'), ('Artículo defectuoso', 'Artículo defectuoso'), ('Otros', 'Otros')]) description = TextAreaField('Descripción", "= TextAreaField('Descripción del informe', [ validators.DataRequired(message='Es necesario escribir una descripción')])", "[ validators.EqualTo('password', message='Las contraseñas no coinciden') ]) submit = SubmitField('Cambiar", "y jardín'), ('Foto y audio', 'Foto y audio') ], validators", "('75', '75'), ('90', '90') ]) ordenacion = SelectField('Ordenación de Resultados',", "nombre son 50 carácteres')]) lastname = StringField('Apellidos', [ validators.DataRequired(message='Es necesario", "son 50 carácteres')]) price = DecimalField('Precio (€)', [ validators.DataRequired(message='Es necesario", "necesario escribir un comentario')]) submit = SubmitField('Publicar Valoración') class bidPlacementForm(FlaskForm):", "de 1 a 5 estrellas')]) comment = TextAreaField('Comentario', [ validators.DataRequired(message='Es", "= StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email'), validators.Length(min=1, max=50,", "('Videojuegos', 'Videojuegos'), ('Libros y música', 'Libros y música'), ('Hogar y", "= SubmitField('Cambiar contraseña') class EditEmail(FlaskForm): email = StringField('Correo electrónico', [", "'Videojuegos'), ('Libros y música', 'Libros y música'), ('Hogar y jardín',", "of the Login form class LoginForm(Form): email = StringField('Email', [", "]) confirm = PasswordField('Confirme la contraseña', [ validators.EqualTo('password', message='Las contraseñas", "email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email'), validators.Length(min=1,", "Register form 
class RegisterForm(Form): name = StringField('Nombre', [ validators.DataRequired(message='Es necesario", "'Precio Ascendente'), ('price DESC', 'Precio Descendente'), ('views DESC', 'Popularidad descendente')]", "una contraseña') ]) password = PasswordField('<PASSWORD>aseña', [ validators.DataRequired(message='Es necesario introducir", "message='Los correos no coinciden') ]) submit = SubmitField('Cambiar correo') class", "= StringField('Precio Máximo') minpublished = DateField('Start', format = '%Y-%m-%d', description", "nombre son 50 carácteres')]) # username = StringField('Username', [ #", "= PasswordField('Contraseña', [ validators.DataRequired(message='Es necesario una contraseña'), validators.Length(min=8, message='La contraseña", "StringField('Precio Mínimo') maxprice = StringField('Precio Máximo') minpublished = DateField('Start', format", "[ validators.DataRequired(message='Es necesario introducir una puntuación entre 1 y 5'),", "introducir un email')]) password = PasswordField('<PASSWORD>', [ validators.DataRequired(message='Es necesario introducir", "'%Y-%m-%d', description = 'Time that the event will occur') maxpublished", "[ validators.DataRequired(message='Es necesario introducir una dirección de correo'), validators.Length(min=1, max=50,", "= [('hombre','Hombre'),('mujer','Mujer')]) submit = SubmitField('Guardar cambios') class EditLocation(FlaskForm): lat =", "€ a 999.999,99 €)')]) category = SelectField('Categoría', choices = [", "venta', 'En Venta'), ('vendido', 'Vendido')] resultadosporpag = ['15', '30', '45',", "y jardín'), ('Foto y audio', 'Foto y audio') ]) estados", "nombre son 50 carácteres')]) gender = RadioField('Género', choices = [('hombre','Hombre'),('mujer','Mujer')])", "música'), ('Hogar y jardín', 'Hogar y jardín'), ('Foto y audio',", "estados = [('en venta', 'En Venta'), ('vendido', 'Vendido')] resultadosporpag =", "= [ ('en venta','En Venta'), ('vendido','Vendido') ]) keywords = StringField('Palabras", "tamaño máximo del nombre del 
producto son 50 carácteres')]) price", "necesario una contraseña'), validators.Length(min=8, message='La contraseña debe tener al menos", "= TextAreaField('Descripción', [ validators.DataRequired(message='Es necesario escribir una descripción')]) lat =", "tamaño máximo del nombre son 50 carácteres')]) lastname = StringField('Apellidos',", "correos no coinciden') ]) submit = SubmitField('Cambiar correo') class EditPicture(FlaskForm):", "mimes = HiddenField(\"Formatos de imagen\") name = StringField('Nombre del producto',", "SubmitField('Iniciar Sesión') # Structure of the Register form class RegisterForm(Form):", "class Review(FlaskForm): stars = IntegerField('Puntuación', [ validators.DataRequired(message='Es necesario introducir una", "nombre de producto'), validators.Length(min=1, max=50, message='El tamaño máximo del nombre", "0 € a 999.999,99 €)')]) category = SelectField('Categoría', choices =", "[validators.Optional()] ) submit = SubmitField('Publicar') class ProductSearch(Form): categories = ['Automoción',", "('distance DESC', 'Distancia Descendente'), ('distance ASC', 'Distancia Ascendente'), ('price ASC',", "[('hombre','Hombre'),('mujer','Mujer')]) submit = SubmitField('Guardar cambios') class EditLocation(FlaskForm): lat = HiddenField('Latitud',", "email')]) submit = SubmitField(\"Correo de Recuperación\") class EditProfile(FlaskForm): name =", "[ ('published ASC', 'Fecha (Más viejos primero)'), ('published DESC', 'Fecha", "= HiddenField('Latitud', [ validators.DataRequired(message='No se ha podido obtener la nueva", "= SubmitField('Publicar') class ProductSearch(Form): categories = ['Automoción', 'Informática', 'Moda', 'Deporte", "= StringField('Precio Mínimo') maxprice = StringField('Precio Máximo') minpublished = DateField('Start',", "description = 'Time that the event will occur', validators= [validators.Optional()]", "username = StringField('Username', [ # validators.Length(min=4, max=25, message='El nombre de", "[ validators.EqualTo('password', 
message='Las contraseñas no coinciden') ]) # Structure of", "class EditProfile(FlaskForm): name = StringField('Nombre', [ validators.DataRequired(message='Es necesario introducir un", "validators.DataRequired(message='No se ha podido obtener la nueva localización') ]) lng", "]) confirm = PasswordField('Confirmar Contraseña', [ validators.EqualTo('password', message='Las contraseñas no", "EditEmail(FlaskForm): email = StringField('Correo electrónico', [ validators.DataRequired(message='Es necesario introducir una", "message='El precio intoducido no es válido (de 0 € a", "('views DESC', 'Popularidad descendente')] status = SelectField('Estado', choices = [", "= SelectField('Resultados Por Página', choices = [ ('15', '15'), ('30',", "StringField('Apellidos', [ validators.DataRequired(message='Es necesario introducir apellidos'), validators.Length(min=4, max=50, message='El tamaño", "SelectField('Categoría', choices = [ ('Sospecha de fraude', 'Sospecha de fraude'),", "EditLocation(FlaskForm): lat = HiddenField('Latitud', [ validators.DataRequired(message='No se ha podido obtener", "de usuario debe tener entre 4 y 25 carácteres')]) email", "TextAreaField('Descripción', [ validators.DataRequired(message='Es necesario escribir una descripción')]) lat = HiddenField('Latitud')", "EditPassword(FlaskForm): old = PasswordField('Contraseña Anterior', [ validators.DataRequired(message='Es necesario introducir una", "Contraseña', [ validators.EqualTo('password', message='Las contraseñas no coinciden') ]) # Structure", "resultadosporpag = ['15', '30', '45', '60', '75', '90'] ordenacionlist =", "cita', 'No acudió a la cita'), ('Mal comportamiento', 'Mal comportamiento'),", "lat = HiddenField('Latitud') lng = HiddenField('Longitud') enddate = DateField('End', format", "'Videojuegos', 'Libros y música', 'Hogar y jardín', 'Foto y audio']", "email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email')]) submit", "DecimalField('Precio (€)', [ 
validators.DataRequired(message='Es necesario introducir un precio'), validators.NumberRange(min=0, max=1000000,", "submit = SubmitField('Establecer imagen') delete = SubmitField('Eliminar imagen') class DeleteAccount(FlaskForm):", "= ['Automoción', 'Informática', 'Moda', 'Deporte y ocio', 'Videojuegos', 'Libros y", "son 50 carácteres')]) gender = RadioField('Género', choices = [('hombre','Hombre'),('mujer','Mujer')]) submit", "un email')]) password = PasswordField('<PASSWORD>', [ validators.DataRequired(message='Es necesario introducir una", "('45', '45'), ('60', '60'), ('75', '75'), ('90', '90') ]) ordenacion", "BooleanField, SubmitField, IntegerField, validators, FileField, \\ MultipleFileField, SelectField, RadioField, HiddenField,", "50 carácteres')]) password = PasswordField('Contraseña', [ validators.DataRequired(message='Es necesario una contraseña'),", "no coinciden') ]) submit = SubmitField('Cambiar correo') class EditPicture(FlaskForm): picture", "'Deporte y ocio'), ('Videojuegos', 'Videojuegos'), ('Libros y música', 'Libros y", "'Artículo defectuoso'), ('Otros', 'Otros')]) description = TextAreaField('Descripción del informe', [", "introducir un email')]) submit = SubmitField(\"Correo de Recuperación\") class EditProfile(FlaskForm):", "'45', '60', '75', '90'] ordenacionlist = [('published ASC', 'Fecha (Más", "('price DESC', 'Precio Descendente'), ('views DESC', 'Popularidad descendente')] status =", "'Informática'), ('Moda', 'Moda'), ('Deporte y ocio', 'Deporte y ocio'), ('Videojuegos',", "imagen') class DeleteAccount(FlaskForm): delete = SubmitField(\"Eliminar cuenta\") # Structure of", "una descripción')]) lat = HiddenField('Latitud') lng = HiddenField('Longitud') enddate =", "venta','En Venta'), ('vendido','Vendido') ]) keywords = StringField('Palabras Clave') minprice =", "validators.NumberRange(min=1, max=5, message='La puntuación debe ser de 1 a 5", "= DecimalField('Precio (€)', [ validators.DataRequired(message='Es necesario introducir un 
precio'), validators.NumberRange(min=0,", "= HiddenField('Longitud') enddate = DateField('End', format = '%Y-%m-%d', description =", "= SubmitField('Buscar') class Review(FlaskForm): stars = IntegerField('Puntuación', [ validators.DataRequired(message='Es necesario", "la nueva localización') ]) lng = HiddenField('Longitud', [ validators.DataRequired(message='No se", "[ validators.DataRequired(message='Es necesario introducir un nombre de producto'), validators.Length(min=1, max=50,", "[ validators.DataRequired(message='Es necesario escribir una descripción')]) submit = SubmitField('Publicar Informe')", "50 carácteres')]) lastname = StringField('Apellidos', [ validators.DataRequired(message='Es necesario introducir apellidos'),", "Valoración') class bidPlacementForm(FlaskForm): amount = StringField('Cantidad') submit = SubmitField('Realizar Puja')", "necesario introducir una contraseña')]) remember_me = BooleanField('Recuerdame') submit = SubmitField('Iniciar", "email = StringField('Correo electrónico', [ validators.DataRequired(message='Es necesario introducir una dirección", "= SubmitField('Establecer ubicación') class EditPassword(FlaskForm): old = PasswordField('Contraseña Anterior', [", "('Sospecha de fraude', 'Sospecha de fraude'), ('No acudió a la", "contraseña')]) remember_me = BooleanField('Recuerdame') submit = SubmitField('Iniciar Sesión') # Structure", "('60', '60'), ('75', '75'), ('90', '90') ]) ordenacion = SelectField('Ordenación", "event will occur') maxpublished = DateField('Start', format = '%Y-%m-%d', description", "class bidPlacementForm(FlaskForm): amount = StringField('Cantidad') submit = SubmitField('Realizar Puja') class", "# Structure of the Login form class RestorePasswordForm(Form): email =", "lat = HiddenField('Latitud', [ validators.DataRequired(message='No se ha podido obtener la", "name = StringField('Nombre del producto', [ validators.DataRequired(message='Es necesario introducir un", "= [ validators.DataRequired(message='Es necesario 
seleccionar una categoría') ]) description =", "podido obtener la nueva localización') ]) submit = SubmitField('Establecer ubicación')", "50 carácteres')]) price = DecimalField('Precio (€)', [ validators.DataRequired(message='Es necesario introducir", "y música', 'Hogar y jardín', 'Foto y audio'] category =", "'%Y-%m-%d', description = 'Time that the event will occur') resultados", "('Deporte y ocio', 'Deporte y ocio'), ('Videojuegos', 'Videojuegos'), ('Libros y", "no coinciden') ]) submit = SubmitField('Cambiar contraseña') class EditEmail(FlaskForm): email", "DeleteAccount(FlaskForm): delete = SubmitField(\"Eliminar cuenta\") # Structure of the Subir", "StringField('Confirmar correo electrónico', [ validators.EqualTo('email', message='Los correos no coinciden') ])", "= 'Time that the event will occur', validators= [validators.Optional()] )", "caracteres') ]) confirm = PasswordField('Confirme la contraseña', [ validators.EqualTo('password', message='Las", "tener al menos 8 caracteres') ]) confirm = PasswordField('Confirmar Contraseña',", "primero)'), ('published DESC', 'Fecha (Más nuevos primero)'), ('distance DESC', 'Distancia", "TextAreaField('Comentario', [ validators.DataRequired(message='Es necesario escribir un comentario')]) submit = SubmitField('Publicar", "y música', 'Libros y música'), ('Hogar y jardín', 'Hogar y", "[ validators.DataRequired(message='No se ha podido obtener la nueva localización') ])", "delete = SubmitField('Eliminar imagen') class DeleteAccount(FlaskForm): delete = SubmitField(\"Eliminar cuenta\")", "contraseña debe tener al menos 8 caracteres') ]) confirm =", "[ ('15', '15'), ('30', '30'), ('45', '45'), ('60', '60'), ('75',", "a 5 estrellas')]) comment = TextAreaField('Comentario', [ validators.DataRequired(message='Es necesario escribir", "'Foto y audio') ]) estados = [('en venta', 'En Venta'),", "Sesión') # Structure of the Register form class RegisterForm(Form): name", "un comentario')]) submit = SubmitField('Publicar Valoración') 
class bidPlacementForm(FlaskForm): amount =", "of the Register form class RegisterForm(Form): name = StringField('Nombre', [", "DateField('Start', format = '%Y-%m-%d', description = 'Time that the event", "validators.EqualTo('password', message='Las contraseñas no coinciden') ]) # Structure of the", "the Register form class RegisterForm(Form): name = StringField('Nombre', [ validators.DataRequired(message='Es", "message='El email no puede contener más de 50 carácteres')]) password", "lng = HiddenField('Longitud', [ validators.DataRequired(message='No se ha podido obtener la", "]) estados = [('en venta', 'En Venta'), ('vendido', 'Vendido')] resultadosporpag", "RadioField, HiddenField, DecimalField, TextAreaField from wtforms.fields.html5 import DateField from wtforms.validators", "the event will occur') maxpublished = DateField('Start', format = '%Y-%m-%d',", "of the Subir Anuncio form class SubirAnuncioForm(FlaskForm): # pictures =", "nuevos primero)'), ('distance DESC', 'Distancia Descendente'), ('distance ASC', 'Distancia Ascendente'),", "'Precio Descendente'), ('views DESC', 'Popularidad descendente') ]) distancia = StringField('Distancia')", "audio', 'Foto y audio') ]) estados = [('en venta', 'En", "debe tener al menos 8 caracteres') ]) confirm = PasswordField('Confirmar", "RadioField('Género', choices = [('hombre','Hombre'),('mujer','Mujer')]) submit = SubmitField('Guardar cambios') class EditLocation(FlaskForm):", "Ascendente'), ('price DESC', 'Precio Descendente'), ('views DESC', 'Popularidad descendente') ])", "no es válido (de 0 € a 999.999,99 €)')]) category", "'Mal comportamiento'), ('Artículo defectuoso', 'Artículo defectuoso'), ('Otros', 'Otros')]) description =", ") submit = SubmitField('Publicar') class ProductSearch(Form): categories = ['Automoción', 'Informática',", "y ocio', 'Videojuegos', 'Libros y música', 'Hogar y jardín', 'Foto", "puede contener más de 50 carácteres')]) confirm = StringField('Confirmar correo", "necesario introducir una 
puntuación entre 1 y 5'), validators.NumberRange(min=1, max=5,", "of the Login form class RestorePasswordForm(Form): email = StringField('Email', [", "]) submit = SubmitField('Cambiar contraseña') class EditEmail(FlaskForm): email = StringField('Correo", "\\ MultipleFileField, SelectField, RadioField, HiddenField, DecimalField, TextAreaField from wtforms.fields.html5 import", "y audio') ], validators = [ validators.DataRequired(message='Es necesario seleccionar una", "contener más de 50 carácteres')]) password = PasswordField('Contraseña', [ validators.DataRequired(message='Es", "validators = [ validators.DataRequired(message='Es necesario seleccionar una categoría') ]) description", "[ validators.DataRequired(message='Es necesario seleccionar una categoría') ]) description = TextAreaField('Descripción',", "= PasswordField('Confirme la contraseña', [ validators.EqualTo('password', message='Las contraseñas no coinciden')", "necesario introducir un precio'), validators.NumberRange(min=0, max=1000000, message='El precio intoducido no", "jardín', 'Hogar y jardín'), ('Foto y audio', 'Foto y audio')", "RegisterForm(Form): name = StringField('Nombre', [ validators.DataRequired(message='Es necesario introducir un nombre'),", "Descendente'), ('views DESC', 'Popularidad descendente') ]) distancia = StringField('Distancia') submit", "= SelectField('Categoría', choices = [ ('Automoción', 'Automoción'), ('Informática', 'Informática'), ('Moda',", "Structure of the Login form class RestorePasswordForm(Form): email = StringField('Email',", "Resultados', choices = [ ('published ASC', 'Fecha (Más viejos primero)'),", "la nueva localización') ]) submit = SubmitField('Establecer ubicación') class EditPassword(FlaskForm):", "distancia = StringField('Distancia') submit = SubmitField('Buscar') class Review(FlaskForm): stars =", "[('published ASC', 'Fecha (Más viejos primero)'), ('published DESC', 'Fecha (Más", "ASC', 'Distancia Ascendente'), ('price ASC', 'Precio Ascendente'), ('price 
DESC', 'Precio", "resultados = SelectField('Resultados Por Página', choices = [ ('15', '15'),", "class EditLocation(FlaskForm): lat = HiddenField('Latitud', [ validators.DataRequired(message='No se ha podido", "ordenacionlist = [('published ASC', 'Fecha (Más viejos primero)'), ('published DESC',", "ha podido obtener la nueva localización') ]) submit = SubmitField('Establecer", "tamaño máximo del nombre son 50 carácteres')]) gender = RadioField('Género',", "import FlaskForm from wtforms import Form, StringField, PasswordField, BooleanField, SubmitField,", "BooleanField('Recuerdame') submit = SubmitField('Iniciar Sesión') # Structure of the Register", "una contraseña'), validators.Length(min=8, message='La contraseña debe tener al menos 8", "choices = [ ('Automoción', 'Automoción'), ('Informática', 'Informática'), ('Moda', 'Moda'), ('Deporte", "cuenta\") # Structure of the Subir Anuncio form class SubirAnuncioForm(FlaskForm):", "Máximo') minpublished = DateField('Start', format = '%Y-%m-%d', description = 'Time", "('Otros', 'Otros')]) description = TextAreaField('Descripción del informe', [ validators.DataRequired(message='Es necesario", "estrellas')]) comment = TextAreaField('Comentario', [ validators.DataRequired(message='Es necesario escribir un comentario')])", "puede contener más de 50 carácteres')]) password = PasswordField('Contraseña', [", "HiddenField('Longitud', [ validators.DataRequired(message='No se ha podido obtener la nueva localización')", "<reponame>Devidence7/Break from flask_wtf import FlaskForm from wtforms import Form, StringField,", "from wtforms.validators import DataRequired # Structure of the Login form", "válido (de 0 € a 999.999,99 €)')]) category = SelectField('Categoría',", "audio') ]) estados = [('en venta', 'En Venta'), ('vendido', 'Vendido')]", "'Informática', 'Moda', 'Deporte y ocio', 'Videojuegos', 'Libros y música', 'Hogar", "class EditPassword(FlaskForm): old = PasswordField('Contraseña Anterior', [ 
validators.DataRequired(message='Es necesario introducir", "jardín'), ('Foto y audio', 'Foto y audio') ], validators =", "PasswordField('Contraseña', [ validators.DataRequired(message='Es necesario una contraseña'), validators.Length(min=8, message='La contraseña debe", "menos 8 caracteres') ]) confirm = PasswordField('Confirme la contraseña', [", "DESC', 'Popularidad descendente')] status = SelectField('Estado', choices = [ ('en", "the event will occur') resultados = SelectField('Resultados Por Página', choices", "message='El tamaño máximo del nombre son 50 carácteres')]) lastname =", "999.999,99 €)')]) category = SelectField('Categoría', choices = [ ('Automoción', 'Automoción'),", "('vendido','Vendido') ]) keywords = StringField('Palabras Clave') minprice = StringField('Precio Mínimo')", "max=50, message='El tamaño máximo del nombre son 50 carácteres')]) #", "DataRequired # Structure of the Login form class LoginForm(Form): email", "apellidos'), validators.Length(min=4, max=50, message='El tamaño máximo del nombre son 50", "viejos primero)'), ('published DESC', 'Fecha (Más nuevos primero)'), ('distance DESC',", "entre 4 y 25 carácteres')]) email = StringField('Email', [ validators.DataRequired(message='Es", "la cita'), ('Mal comportamiento', 'Mal comportamiento'), ('Artículo defectuoso', 'Artículo defectuoso'),", "Recuperación\") class EditProfile(FlaskForm): name = StringField('Nombre', [ validators.DataRequired(message='Es necesario introducir", "class LoginForm(Form): email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir un", "form class SubirAnuncioForm(FlaskForm): # pictures = HiddenField(\"Imágenes\") # mimes =", "electrónico', [ validators.DataRequired(message='Es necesario introducir una dirección de correo'), validators.Length(min=1,", "de imagen\") name = StringField('Nombre del producto', [ validators.DataRequired(message='Es necesario", "Login form class LoginForm(Form): email = StringField('Email', [ 
validators.DataRequired(message='Es necesario", "máximo del nombre son 50 carácteres')]) lastname = StringField('Apellidos', [", "]) description = TextAreaField('Descripción', [ validators.DataRequired(message='Es necesario escribir una descripción')])", "= ['15', '30', '45', '60', '75', '90'] ordenacionlist = [('published", "choices = [ ('Sospecha de fraude', 'Sospecha de fraude'), ('No", "submit = SubmitField('Cambiar contraseña') class EditEmail(FlaskForm): email = StringField('Correo electrónico',", "('vendido', 'Vendido')] resultadosporpag = ['15', '30', '45', '60', '75', '90']", "= '%Y-%m-%d', description = 'Time that the event will occur')", "form class LoginForm(Form): email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir", "validators.DataRequired(message='Es necesario introducir una contraseña') ]) password = PasswordField('<PASSWORD>aseña', [", "picture = FileField('Imagen de perfil') submit = SubmitField('Establecer imagen') delete", "del nombre del producto son 50 carácteres')]) price = DecimalField('Precio", "occur') resultados = SelectField('Resultados Por Página', choices = [ ('15',", "confirm = StringField('Confirmar correo electrónico', [ validators.EqualTo('email', message='Los correos no", "= HiddenField(\"Formatos de imagen\") name = StringField('Nombre del producto', [", "]) # Structure of the Login form class RestorePasswordForm(Form): email", "SubmitField, IntegerField, validators, FileField, \\ MultipleFileField, SelectField, RadioField, HiddenField, DecimalField,", "'Hogar y jardín'), ('Foto y audio', 'Foto y audio') ])", "LoginForm(Form): email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email')])", "una puntuación entre 1 y 5'), validators.NumberRange(min=1, max=5, message='La puntuación", "validators.Length(min=4, max=50, message='El tamaño máximo del nombre son 50 carácteres')])", "format = '%Y-%m-%d', description = 'Time that the event will", "de 50 
carácteres')]) password = PasswordField('Contraseña', [ validators.DataRequired(message='Es necesario una", "'Popularidad descendente') ]) distancia = StringField('Distancia') submit = SubmitField('Buscar') class", "comportamiento', 'Mal comportamiento'), ('Artículo defectuoso', 'Artículo defectuoso'), ('Otros', 'Otros')]) description", "= SubmitField('Cambiar correo') class EditPicture(FlaskForm): picture = FileField('Imagen de perfil')", "= [ ('15', '15'), ('30', '30'), ('45', '45'), ('60', '60'),", "choices = [ ('15', '15'), ('30', '30'), ('45', '45'), ('60',", "'%Y-%m-%d', description = 'Time that the event will occur', validators=", "= SubmitField('Publicar Valoración') class bidPlacementForm(FlaskForm): amount = StringField('Cantidad') submit =", "['15', '30', '45', '60', '75', '90'] ordenacionlist = [('published ASC',", "('published ASC', 'Fecha (Más viejos primero)'), ('published DESC', 'Fecha (Más", "una contraseña')]) remember_me = BooleanField('Recuerdame') submit = SubmitField('Iniciar Sesión') #", "= StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email')]) submit =", "validators.Length(min=1, max=50, message='El correo no puede contener más de 50", "will occur') maxpublished = DateField('Start', format = '%Y-%m-%d', description =", "validators.DataRequired(message='Es necesario introducir un nombre de producto'), validators.Length(min=1, max=50, message='El", "Venta'), ('vendido', 'Vendido')] resultadosporpag = ['15', '30', '45', '60', '75',", "wtforms import Form, StringField, PasswordField, BooleanField, SubmitField, IntegerField, validators, FileField,", "= [ ('published ASC', 'Fecha (Más viejos primero)'), ('published DESC',", "HiddenField(\"Formatos de imagen\") name = StringField('Nombre del producto', [ validators.DataRequired(message='Es", "= StringField('Confirmar correo electrónico', [ validators.EqualTo('email', message='Los correos no coinciden')", "('Automoción', 'Automoción'), ('Informática', 
'Informática'), ('Moda', 'Moda'), ('Deporte y ocio', 'Deporte", "'Time that the event will occur') resultados = SelectField('Resultados Por", "= SubmitField('Guardar cambios') class EditLocation(FlaskForm): lat = HiddenField('Latitud', [ validators.DataRequired(message='No", "stars = IntegerField('Puntuación', [ validators.DataRequired(message='Es necesario introducir una puntuación entre", "= SubmitField('Establecer imagen') delete = SubmitField('Eliminar imagen') class DeleteAccount(FlaskForm): delete", "carácteres')]) # username = StringField('Username', [ # validators.Length(min=4, max=25, message='El", "form class RestorePasswordForm(Form): email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir", "cita'), ('Mal comportamiento', 'Mal comportamiento'), ('Artículo defectuoso', 'Artículo defectuoso'), ('Otros',", "introducir una contraseña'), validators.Length(min=8, message='La contraseña debe tener al menos", "validators.DataRequired(message='Es necesario escribir un comentario')]) submit = SubmitField('Publicar Valoración') class", "validators.DataRequired(message='Es necesario introducir una contraseña'), validators.Length(min=8, message='La contraseña debe tener", "= TextAreaField('Comentario', [ validators.DataRequired(message='Es necesario escribir un comentario')]) submit =", "= IntegerField('Puntuación', [ validators.DataRequired(message='Es necesario introducir una puntuación entre 1", "PasswordField('Confirmar Contraseña', [ validators.EqualTo('password', message='Las contraseñas no coinciden') ]) #", "from wtforms.fields.html5 import DateField from wtforms.validators import DataRequired # Structure", "the event will occur', validators= [validators.Optional()] ) submit = SubmitField('Publicar')", "correo electrónico', [ validators.EqualTo('email', message='Los correos no coinciden') ]) submit", "('views DESC', 'Popularidad descendente') ]) distancia = StringField('Distancia') submit =", "son 50 carácteres')]) lastname = 
StringField('Apellidos', [ validators.DataRequired(message='Es necesario introducir", "de Recuperación\") class EditProfile(FlaskForm): name = StringField('Nombre', [ validators.DataRequired(message='Es necesario", "8 caracteres') ]) confirm = PasswordField('Confirmar Contraseña', [ validators.EqualTo('password', message='Las", "SubmitField(\"Correo de Recuperación\") class EditProfile(FlaskForm): name = StringField('Nombre', [ validators.DataRequired(message='Es", "'Libros y música'), ('Hogar y jardín', 'Hogar y jardín'), ('Foto", "class SubirAnuncioForm(FlaskForm): # pictures = HiddenField(\"Imágenes\") # mimes = HiddenField(\"Formatos", "HiddenField('Longitud') enddate = DateField('End', format = '%Y-%m-%d', description = 'Time", "y jardín', 'Hogar y jardín'), ('Foto y audio', 'Foto y", "SubmitField('Buscar') class Review(FlaskForm): stars = IntegerField('Puntuación', [ validators.DataRequired(message='Es necesario introducir", "un nombre de producto'), validators.Length(min=1, max=50, message='El tamaño máximo del", "introducir una puntuación entre 1 y 5'), validators.NumberRange(min=1, max=5, message='La", "'60', '75', '90'] ordenacionlist = [('published ASC', 'Fecha (Más viejos", "SelectField, RadioField, HiddenField, DecimalField, TextAreaField from wtforms.fields.html5 import DateField from", "('Libros y música', 'Libros y música'), ('Hogar y jardín', 'Hogar", "fraude'), ('No acudió a la cita', 'No acudió a la", "ubicación') class EditPassword(FlaskForm): old = PasswordField('Contraseña Anterior', [ validators.DataRequired(message='Es necesario", "= PasswordField('<PASSWORD>aseña', [ validators.DataRequired(message='Es necesario introducir una contraseña'), validators.Length(min=8, message='La", "= PasswordField('<PASSWORD>', [ validators.DataRequired(message='Es necesario introducir una contraseña')]) remember_me =", "audio') ], validators = [ validators.DataRequired(message='Es necesario seleccionar una categoría')", "5'), validators.NumberRange(min=1, 
max=5, message='La puntuación debe ser de 1 a", "SubmitField('Publicar Valoración') class bidPlacementForm(FlaskForm): amount = StringField('Cantidad') submit = SubmitField('Realizar", "ASC', 'Precio Ascendente'), ('price DESC', 'Precio Descendente'), ('views DESC', 'Popularidad", "validators.Length(min=4, max=25, message='El nombre de usuario debe tener entre 4", "más de 50 carácteres')]) password = PasswordField('Contraseña', [ validators.DataRequired(message='Es necesario", "= SubmitField('Iniciar Sesión') # Structure of the Register form class", "validators, FileField, \\ MultipleFileField, SelectField, RadioField, HiddenField, DecimalField, TextAreaField from", "= [('published ASC', 'Fecha (Más viejos primero)'), ('published DESC', 'Fecha", "StringField('Nombre', [ validators.DataRequired(message='Es necesario introducir un nombre'), validators.Length(min=4, max=50, message='El", "'Fecha (Más viejos primero)'), ('published DESC', 'Fecha (Más nuevos primero)'),", "maxpublished = DateField('Start', format = '%Y-%m-%d', description = 'Time that", "]) ordenacion = SelectField('Ordenación de Resultados', choices = [ ('published", "no puede contener más de 50 carácteres')]) password = PasswordField('Contraseña',", "introducir un nombre de producto'), validators.Length(min=1, max=50, message='El tamaño máximo", "[ validators.DataRequired(message='Es necesario introducir un email')]) password = PasswordField('<PASSWORD>', [", "StringField, PasswordField, BooleanField, SubmitField, IntegerField, validators, FileField, \\ MultipleFileField, SelectField,", "submit = SubmitField('Cambiar correo') class EditPicture(FlaskForm): picture = FileField('Imagen de", "= HiddenField('Latitud') lng = HiddenField('Longitud') enddate = DateField('End', format =", "('published DESC', 'Fecha (Más nuevos primero)'), ('distance DESC', 'Distancia Descendente'),", "y 25 carácteres')]) email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir", 
"SubmitField('Realizar Puja') class reportForm(Form): category = SelectField('Categoría', choices = [", "EditProfile(FlaskForm): name = StringField('Nombre', [ validators.DataRequired(message='Es necesario introducir un nombre'),", "Anuncio form class SubirAnuncioForm(FlaskForm): # pictures = HiddenField(\"Imágenes\") # mimes", "Descendente'), ('distance ASC', 'Distancia Ascendente'), ('price ASC', 'Precio Ascendente'), ('price", "choices = [ ('en venta','En Venta'), ('vendido','Vendido') ]) keywords =", "max=1000000, message='El precio intoducido no es válido (de 0 €", "escribir un comentario')]) submit = SubmitField('Publicar Valoración') class bidPlacementForm(FlaskForm): amount", "'Distancia Descendente'), ('distance ASC', 'Distancia Ascendente'), ('price ASC', 'Precio Ascendente'),", "PasswordField('<PASSWORD>aseña', [ validators.DataRequired(message='Es necesario introducir una contraseña'), validators.Length(min=8, message='La contraseña", "introducir una dirección de correo'), validators.Length(min=1, max=50, message='El correo no", "max=50, message='El tamaño máximo del nombre del producto son 50", "[ validators.DataRequired(message='Es necesario escribir una descripción')]) lat = HiddenField('Latitud') lng", "submit = SubmitField('Publicar Valoración') class bidPlacementForm(FlaskForm): amount = StringField('Cantidad') submit", "DecimalField, TextAreaField from wtforms.fields.html5 import DateField from wtforms.validators import DataRequired", "message='El tamaño máximo del nombre son 50 carácteres')]) gender =", "(Más nuevos primero)'), ('distance DESC', 'Distancia Descendente'), ('distance ASC', 'Distancia", "message='Las contraseñas no coinciden') ]) submit = SubmitField('Cambiar contraseña') class", "validators.DataRequired(message='Es necesario introducir apellidos'), validators.Length(min=4, max=50, message='El tamaño máximo del", "email'), validators.Length(min=1, max=50, message='El email no puede contener más de", "['Automoción', 'Informática', 
'Moda', 'Deporte y ocio', 'Videojuegos', 'Libros y música',", "contraseña'), validators.Length(min=8, message='La contraseña debe tener al menos 8 caracteres')", "DESC', 'Fecha (Más nuevos primero)'), ('distance DESC', 'Distancia Descendente'), ('distance", "obtener la nueva localización') ]) lng = HiddenField('Longitud', [ validators.DataRequired(message='No", "old = PasswordField('Contraseña Anterior', [ validators.DataRequired(message='Es necesario introducir una contraseña')", "email')]) password = PasswordField('<PASSWORD>', [ validators.DataRequired(message='Es necesario introducir una contraseña')])", "validators.DataRequired(message='Es necesario introducir una contraseña')]) remember_me = BooleanField('Recuerdame') submit =", "validators.Length(min=8, message='La contraseña debe tener al menos 8 caracteres') ])", "'90'] ordenacionlist = [('published ASC', 'Fecha (Más viejos primero)'), ('published", "]) lng = HiddenField('Longitud', [ validators.DataRequired(message='No se ha podido obtener", "nombre'), validators.Length(min=4, max=50, message='El tamaño máximo del nombre son 50", "Login form class RestorePasswordForm(Form): email = StringField('Email', [ validators.DataRequired(message='Es necesario", "]) keywords = StringField('Palabras Clave') minprice = StringField('Precio Mínimo') maxprice", "nueva localización') ]) lng = HiddenField('Longitud', [ validators.DataRequired(message='No se ha", "una categoría') ]) description = TextAreaField('Descripción', [ validators.DataRequired(message='Es necesario escribir", "fraude', 'Sospecha de fraude'), ('No acudió a la cita', 'No", "('No acudió a la cita', 'No acudió a la cita'),", "gender = RadioField('Género', choices = [('hombre','Hombre'),('mujer','Mujer')]) submit = SubmitField('Guardar cambios')", "defectuoso'), ('Otros', 'Otros')]) description = TextAreaField('Descripción del informe', [ validators.DataRequired(message='Es", "nueva localización') ]) submit = SubmitField('Establecer ubicación') class 
EditPassword(FlaskForm): old", "= HiddenField('Longitud', [ validators.DataRequired(message='No se ha podido obtener la nueva", "PasswordField('<PASSWORD>', [ validators.DataRequired(message='Es necesario introducir una contraseña')]) remember_me = BooleanField('Recuerdame')", "introducir una contraseña') ]) password = PasswordField('<PASSWORD>aseña', [ validators.DataRequired(message='Es necesario", "Por Página', choices = [ ('15', '15'), ('30', '30'), ('45',", "SubmitField('Establecer imagen') delete = SubmitField('Eliminar imagen') class DeleteAccount(FlaskForm): delete =", "ProductSearch(Form): categories = ['Automoción', 'Informática', 'Moda', 'Deporte y ocio', 'Videojuegos',", "StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email'), validators.Length(min=1, max=50, message='El", "choices = [ ('published ASC', 'Fecha (Más viejos primero)'), ('published", "StringField('Palabras Clave') minprice = StringField('Precio Mínimo') maxprice = StringField('Precio Máximo')", "Ascendente'), ('price ASC', 'Precio Ascendente'), ('price DESC', 'Precio Descendente'), ('views", "]) distancia = StringField('Distancia') submit = SubmitField('Buscar') class Review(FlaskForm): stars", "correo'), validators.Length(min=1, max=50, message='El correo no puede contener más de", "1 y 5'), validators.NumberRange(min=1, max=5, message='La puntuación debe ser de", "Página', choices = [ ('15', '15'), ('30', '30'), ('45', '45'),", "categoría') ]) description = TextAreaField('Descripción', [ validators.DataRequired(message='Es necesario escribir una", "introducir una contraseña')]) remember_me = BooleanField('Recuerdame') submit = SubmitField('Iniciar Sesión')", "[ validators.DataRequired(message='Es necesario introducir una contraseña') ]) password = PasswordField('<PASSWORD>aseña',", "SubmitField(\"Eliminar cuenta\") # Structure of the Subir Anuncio form class", "= 'Time that the event will occur') maxpublished = DateField('Start',", "max=50, message='El 
tamaño máximo del nombre son 50 carácteres')]) gender", "'Sospecha de fraude'), ('No acudió a la cita', 'No acudió", "más de 50 carácteres')]) confirm = StringField('Confirmar correo electrónico', [", "'En Venta'), ('vendido', 'Vendido')] resultadosporpag = ['15', '30', '45', '60',", "= PasswordField('Confirmar Contraseña', [ validators.EqualTo('password', message='Las contraseñas no coinciden') ])", "('15', '15'), ('30', '30'), ('45', '45'), ('60', '60'), ('75', '75'),", "acudió a la cita', 'No acudió a la cita'), ('Mal", "'No acudió a la cita'), ('Mal comportamiento', 'Mal comportamiento'), ('Artículo", "class RestorePasswordForm(Form): email = StringField('Email', [ validators.DataRequired(message='Es necesario introducir un", "tener al menos 8 caracteres') ]) confirm = PasswordField('Confirme la", "SelectField('Estado', choices = [ ('en venta','En Venta'), ('vendido','Vendido') ]) keywords", "category = SelectField('Categoría', choices = [ ('Sospecha de fraude', 'Sospecha", "message='El correo no puede contener más de 50 carácteres')]) confirm", "validators.Length(min=1, max=50, message='El email no puede contener más de 50", "choices = [('hombre','Hombre'),('mujer','Mujer')]) submit = SubmitField('Guardar cambios') class EditLocation(FlaskForm): lat", "debe tener al menos 8 caracteres') ]) confirm = PasswordField('Confirme", "('Foto y audio', 'Foto y audio') ]) estados = [('en", "password = PasswordField('Contraseña', [ validators.DataRequired(message='Es necesario una contraseña'), validators.Length(min=8, message='La", "('Artículo defectuoso', 'Artículo defectuoso'), ('Otros', 'Otros')]) description = TextAreaField('Descripción del", "the Login form class RestorePasswordForm(Form): email = StringField('Email', [ validators.DataRequired(message='Es", "[ ('Automoción', 'Automoción'), ('Informática', 'Informática'), ('Moda', 'Moda'), ('Deporte y ocio',", "amount = StringField('Cantidad') submit = SubmitField('Realizar Puja') class reportForm(Form): category", 
"[ validators.DataRequired(message='Es necesario introducir apellidos'), validators.Length(min=4, max=50, message='El tamaño máximo", "submit = SubmitField('Establecer ubicación') class EditPassword(FlaskForm): old = PasswordField('Contraseña Anterior',", "electrónico', [ validators.EqualTo('email', message='Los correos no coinciden') ]) submit =", "description = TextAreaField('Descripción del informe', [ validators.DataRequired(message='Es necesario escribir una", "reportForm(Form): category = SelectField('Categoría', choices = [ ('Sospecha de fraude',", "[('en venta', 'En Venta'), ('vendido', 'Vendido')] resultadosporpag = ['15', '30',", "ocio'), ('Videojuegos', 'Videojuegos'), ('Libros y música', 'Libros y música'), ('Hogar", "acudió a la cita'), ('Mal comportamiento', 'Mal comportamiento'), ('Artículo defectuoso',", "'15'), ('30', '30'), ('45', '45'), ('60', '60'), ('75', '75'), ('90',", "('price DESC', 'Precio Descendente'), ('views DESC', 'Popularidad descendente') ]) distancia", "StringField('Precio Máximo') minpublished = DateField('Start', format = '%Y-%m-%d', description =", "Review(FlaskForm): stars = IntegerField('Puntuación', [ validators.DataRequired(message='Es necesario introducir una puntuación", "confirm = PasswordField('Confirme la contraseña', [ validators.EqualTo('password', message='Las contraseñas no", "introducir apellidos'), validators.Length(min=4, max=50, message='El tamaño máximo del nombre son", "máximo del nombre son 50 carácteres')]) gender = RadioField('Género', choices", "= StringField('Correo electrónico', [ validators.DataRequired(message='Es necesario introducir una dirección de", "status = SelectField('Estado', choices = [ ('en venta','En Venta'), ('vendido','Vendido')", "HiddenField('Latitud', [ validators.DataRequired(message='No se ha podido obtener la nueva localización')", "max=50, message='El correo no puede contener más de 50 carácteres')])", "]) submit = SubmitField('Cambiar correo') class EditPicture(FlaskForm): picture 
= FileField('Imagen", "price = DecimalField('Precio (€)', [ validators.DataRequired(message='Es necesario introducir un precio'),", "= StringField('Nombre', [ validators.DataRequired(message='Es necesario introducir un nombre'), validators.Length(min=4, max=50,", "]) submit = SubmitField('Establecer ubicación') class EditPassword(FlaskForm): old = PasswordField('Contraseña", "occur', validators= [validators.Optional()] ) submit = SubmitField('Publicar') class ProductSearch(Form): categories", "[ validators.DataRequired(message='Es necesario introducir un email')]) submit = SubmitField(\"Correo de", "necesario introducir una contraseña'), validators.Length(min=8, message='La contraseña debe tener al", "validators.DataRequired(message='Es necesario introducir un email'), validators.Length(min=1, max=50, message='El email no", "StringField('Email', [ validators.DataRequired(message='Es necesario introducir un email')]) submit = SubmitField(\"Correo", "class EditPicture(FlaskForm): picture = FileField('Imagen de perfil') submit = SubmitField('Establecer", "[ validators.DataRequired(message='Es necesario introducir un precio'), validators.NumberRange(min=0, max=1000000, message='El precio", "de correo'), validators.Length(min=1, max=50, message='El correo no puede contener más", "localización') ]) lng = HiddenField('Longitud', [ validators.DataRequired(message='No se ha podido", "4 y 25 carácteres')]) email = StringField('Email', [ validators.DataRequired(message='Es necesario", "= DateField('Start', format = '%Y-%m-%d', description = 'Time that the", "(Más viejos primero)'), ('published DESC', 'Fecha (Más nuevos primero)'), ('distance", "max=5, message='La puntuación debe ser de 1 a 5 estrellas')])", "ser de 1 a 5 estrellas')]) comment = TextAreaField('Comentario', [", "comentario')]) submit = SubmitField('Publicar Valoración') class bidPlacementForm(FlaskForm): amount = StringField('Cantidad')", "= '%Y-%m-%d', description = 'Time that the event will occur',", "[ 
validators.DataRequired(message='Es necesario introducir una contraseña')]) remember_me = BooleanField('Recuerdame') submit", "form class RegisterForm(Form): name = StringField('Nombre', [ validators.DataRequired(message='Es necesario introducir", "keywords = StringField('Palabras Clave') minprice = StringField('Precio Mínimo') maxprice =", "usuario debe tener entre 4 y 25 carácteres')]) email =", "DateField from wtforms.validators import DataRequired # Structure of the Login", "= [ ('Automoción', 'Automoción'), ('Informática', 'Informática'), ('Moda', 'Moda'), ('Deporte y", "SelectField('Resultados Por Página', choices = [ ('15', '15'), ('30', '30'),", "('90', '90') ]) ordenacion = SelectField('Ordenación de Resultados', choices =", "menos 8 caracteres') ]) confirm = PasswordField('Confirmar Contraseña', [ validators.EqualTo('password',", "= SubmitField(\"Correo de Recuperación\") class EditProfile(FlaskForm): name = StringField('Nombre', [", "al menos 8 caracteres') ]) confirm = PasswordField('Confirme la contraseña',", "import DateField from wtforms.validators import DataRequired # Structure of the", "SubmitField('Publicar') class ProductSearch(Form): categories = ['Automoción', 'Informática', 'Moda', 'Deporte y", "class reportForm(Form): category = SelectField('Categoría', choices = [ ('Sospecha de", "cambios') class EditLocation(FlaskForm): lat = HiddenField('Latitud', [ validators.DataRequired(message='No se ha", "wtforms.validators import DataRequired # Structure of the Login form class", "Puja') class reportForm(Form): category = SelectField('Categoría', choices = [ ('Sospecha", "una dirección de correo'), validators.Length(min=1, max=50, message='El correo no puede", "[ validators.DataRequired(message='Es necesario escribir un comentario')]) submit = SubmitField('Publicar Valoración')", "occur') maxpublished = DateField('Start', format = '%Y-%m-%d', description = 'Time" ]
[ "\"size\": 80 }, { \"name\": \"salary\", \"title\": \"Salary\", \"sortable\": True,", "= 5678 if len(sys.argv) >=2: port = sys.argv[1] ws_server =", "{ \"name\": \"start\", \"title\": \"Start\", \"sortable\": True, \"size\": 150, \"format\":", "ws.send(json.dumps(ret)) async def main_logic(websocket, path): await recv_msg(websocket) port = 5678", "\"id\", \"title\": \"ID\", \"size\": 50, \"sortable\": True, \"sortDir\": \"asc\", \"format\":", "\"format\": \"number\" }, { \"name\": \"name\", \"title\": \"Name\", \"sortable\": True", "await ws.send(json.dumps(ret)) async def recv_msg(websocket): while True: recv_text = await", "= None def get_host_ip(): try: s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(('192.168.127.12',", "5678 if len(sys.argv) >=2: port = sys.argv[1] ws_server = websockets.serve(main_logic,", "import asyncio import websockets import json import socket import xlrd", "svalue.append( s.row_values(r) ) ctx[i] = (sname, svalue) return ctx #生成json", "= json.loads(recv_text) await proc_msg(websocket, msg) except: ret = {'type':'error'} await", "def gen_pro(): ret = { \"header\": [ { \"name\": \"id\",", "method = msg.get('method') if method == 'host_ip': ip=get_host_ip() ret =", "} await ws.send(json.dumps(ret)) else: ret = {'type':'unknown'} await ws.send(json.dumps(ret)) async", "\"name\": \"id\", \"title\": \"ID\", \"size\": 50, \"sortable\": True, \"sortDir\": \"asc\",", "msg) except: ret = {'type':'error'} await ws.send(json.dumps(ret)) async def main_logic(websocket,", "#coding: utf-8 import sys import os import asyncio import websockets", "\"name\": \"name\", \"title\": \"Name\", \"sortable\": True }, { \"name\": \"start\",", "get_host_ip(): try: s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(('192.168.127.12', 65535)) ip =", "s.connect(('192.168.127.12', 65535)) ip = s.getsockname()[0] finally: s.close() return ip def", ">=2: port = sys.argv[1] ws_server = websockets.serve(main_logic, '0.0.0.0', port) 
asyncio.get_event_loop().run_until_complete(ws_server)", "ws.send(json.dumps(ret)) else: ret = {'type':'unknown'} await ws.send(json.dumps(ret)) async def recv_msg(websocket):", "\"size\": 150, \"format\": \"date\", \"formatMask\": \"dd-mm-yyyy\" }, { \"name\": \"age\",", "{ \"name\": \"age\", \"title\": \"Age\", \"sortable\": True, \"size\": 80 },", "proc_msg(ws, msg): method = msg.get('method') if method == 'host_ip': ip=get_host_ip()", "failed!\" % name) for i in range(book.nsheets): s = book.sheet_by_index(i)", "\"method\":method, \"type\":'success', 'return':ip } await ws.send(json.dumps(ret)) elif method=='genpro': phd_file =", "try: book = xlrd.open_workbook(name) except: print(\"Open Excel(%s) failed!\" % name)", "}, { \"name\": \"name\", \"title\": \"Name\", \"sortable\": True }, {", "phd_file = msg.get('phd_file') if phd_file: phd_data = read_xls(phd_file) pro_file =", "def get_host_ip(): try: s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(('192.168.127.12', 65535)) ip", "ctx #生成json def gen_pro(): ret = { \"header\": [ {", "ret = {'type':'unknown'} await ws.send(json.dumps(ret)) async def recv_msg(websocket): while True:", "socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(('192.168.127.12', 65535)) ip = s.getsockname()[0] finally: s.close() return", "s = book.sheet_by_index(i) sname = s.name svalue = list() for", "await ws.send(json.dumps(ret)) elif method=='genpro': phd_file = msg.get('phd_file') if phd_file: phd_data", "s.row_values(r) ) ctx[i] = (sname, svalue) return ctx #生成json def", "} await ws.send(json.dumps(ret)) elif method=='genpro': phd_file = msg.get('phd_file') if phd_file:", "'host_ip': ip=get_host_ip() ret = { \"method\":method, \"type\":'success', 'return':ip } await", "50, \"sortable\": True, \"sortDir\": \"asc\", \"format\": \"number\" }, { \"name\":", ") ctx[i] = (sname, svalue) return ctx #生成json def gen_pro():", "= {'type':'error'} await ws.send(json.dumps(ret)) async def main_logic(websocket, path): await 
recv_msg(websocket)", "{ \"header\": [ { \"name\": \"id\", \"title\": \"ID\", \"size\": 50,", "read_xls(phd_file) pro_file = msg.get('pro_file') if pro_file: pro_data = read_xls(pro_file) data", "= msg.get('pro_file') if pro_file: pro_data = read_xls(pro_file) data = gen_pro()", "async def main_logic(websocket, path): await recv_msg(websocket) port = 5678 if", "def main_logic(websocket, path): await recv_msg(websocket) port = 5678 if len(sys.argv)", "method=='genpro': phd_file = msg.get('phd_file') if phd_file: phd_data = read_xls(phd_file) pro_file", "= xlrd.open_workbook(name) except: print(\"Open Excel(%s) failed!\" % name) for i", "if len(sys.argv) >=2: port = sys.argv[1] ws_server = websockets.serve(main_logic, '0.0.0.0',", "recv_text = await websocket.recv() try: msg = json.loads(recv_text) await proc_msg(websocket,", "ip = s.getsockname()[0] finally: s.close() return ip def read_xls(name): try:", "return ctx #生成json def gen_pro(): ret = { \"header\": [", "svalue) return ctx #生成json def gen_pro(): ret = { \"header\":", "}, { \"name\": \"salary\", \"title\": \"Salary\", \"sortable\": True, \"size\": 150,", "= msg.get('method') if method == 'host_ip': ip=get_host_ip() ret = {", "'return':data } await ws.send(json.dumps(ret)) else: ret = {'type':'unknown'} await ws.send(json.dumps(ret))", "path): await recv_msg(websocket) port = 5678 if len(sys.argv) >=2: port", "= None pro_data = None def get_host_ip(): try: s =", "= book.sheet_by_index(i) sname = s.name svalue = list() for r", "read_xls(pro_file) data = gen_pro() ret = { \"method\":method, \"type\":'success', 'return':data", "#生成json def gen_pro(): ret = { \"header\": [ { \"name\":", "import os import asyncio import websockets import json import socket", "\"header\": [ { \"name\": \"id\", \"title\": \"ID\", \"size\": 50, \"sortable\":", "port = 5678 if len(sys.argv) >=2: port = sys.argv[1] ws_server", "\"format\": \"date\", \"formatMask\": \"dd-mm-yyyy\" }, { \"name\": \"age\", \"title\": \"Age\",", "= { 
\"header\": [ { \"name\": \"id\", \"title\": \"ID\", \"size\":", "try: s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(('192.168.127.12', 65535)) ip = s.getsockname()[0]", "if method == 'host_ip': ip=get_host_ip() ret = { \"method\":method, \"type\":'success',", "json.loads(recv_text) await proc_msg(websocket, msg) except: ret = {'type':'error'} await ws.send(json.dumps(ret))", "range(s.nrows): svalue.append( s.row_values(r) ) ctx[i] = (sname, svalue) return ctx", "proc_msg(websocket, msg) except: ret = {'type':'error'} await ws.send(json.dumps(ret)) async def", "import xlrd #global vars phd_data = None pro_data = None", "\"name\": \"salary\", \"title\": \"Salary\", \"sortable\": True, \"size\": 150, \"format\": \"money\",", "if pro_file: pro_data = read_xls(pro_file) data = gen_pro() ret =", "websockets import json import socket import xlrd #global vars phd_data", "\"Start\", \"sortable\": True, \"size\": 150, \"format\": \"date\", \"formatMask\": \"dd-mm-yyyy\" },", "True }, { \"name\": \"start\", \"title\": \"Start\", \"sortable\": True, \"size\":", "def recv_msg(websocket): while True: recv_text = await websocket.recv() try: msg", "\"formatMask\": \"dd-mm-yyyy\" }, { \"name\": \"age\", \"title\": \"Age\", \"sortable\": True,", "], \"data\":[] } return ret async def proc_msg(ws, msg): method", "range(book.nsheets): s = book.sheet_by_index(i) sname = s.name svalue = list()", "Excel(%s) failed!\" % name) for i in range(book.nsheets): s =", "except: print(\"Open Excel(%s) failed!\" % name) for i in range(book.nsheets):", "sys import os import asyncio import websockets import json import", "[ { \"name\": \"id\", \"title\": \"ID\", \"size\": 50, \"sortable\": True,", "\"size\": 50, \"sortable\": True, \"sortDir\": \"asc\", \"format\": \"number\" }, {", "\"sortable\": True, \"size\": 80 }, { \"name\": \"salary\", \"title\": \"Salary\",", "phd_data = read_xls(phd_file) pro_file = msg.get('pro_file') if pro_file: pro_data =", "import json import socket 
import xlrd #global vars phd_data =", "data = gen_pro() ret = { \"method\":method, \"type\":'success', 'return':data }", "return ret async def proc_msg(ws, msg): method = msg.get('method') if", "{ \"method\":method, \"type\":'success', 'return':data } await ws.send(json.dumps(ret)) else: ret =", "#global vars phd_data = None pro_data = None def get_host_ip():", "65535)) ip = s.getsockname()[0] finally: s.close() return ip def read_xls(name):", "\"sortDir\": \"asc\", \"format\": \"number\" }, { \"name\": \"name\", \"title\": \"Name\",", "ws.send(json.dumps(ret)) async def recv_msg(websocket): while True: recv_text = await websocket.recv()", "\"size\": 150, \"format\": \"money\", \"show\": True } ], \"data\":[] }", "while True: recv_text = await websocket.recv() try: msg = json.loads(recv_text)", "{ \"name\": \"salary\", \"title\": \"Salary\", \"sortable\": True, \"size\": 150, \"format\":", "msg): method = msg.get('method') if method == 'host_ip': ip=get_host_ip() ret", "ret = { \"method\":method, \"type\":'success', 'return':data } await ws.send(json.dumps(ret)) else:", "True: recv_text = await websocket.recv() try: msg = json.loads(recv_text) await", "s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(('192.168.127.12', 65535)) ip = s.getsockname()[0] finally:", "{ \"name\": \"name\", \"title\": \"Name\", \"sortable\": True }, { \"name\":", "(sname, svalue) return ctx #生成json def gen_pro(): ret = {", "vars phd_data = None pro_data = None def get_host_ip(): try:", "ret = { \"method\":method, \"type\":'success', 'return':ip } await ws.send(json.dumps(ret)) elif", "\"sortable\": True, \"size\": 150, \"format\": \"money\", \"show\": True } ],", "\"name\", \"title\": \"Name\", \"sortable\": True }, { \"name\": \"start\", \"title\":", "ret = {'type':'error'} await ws.send(json.dumps(ret)) async def main_logic(websocket, path): await", "\"show\": True } ], \"data\":[] } return ret async def", "ws.send(json.dumps(ret)) elif method=='genpro': phd_file = 
msg.get('phd_file') if phd_file: phd_data =", "} ], \"data\":[] } return ret async def proc_msg(ws, msg):", "}, { \"name\": \"start\", \"title\": \"Start\", \"sortable\": True, \"size\": 150,", "\"title\": \"Start\", \"sortable\": True, \"size\": 150, \"format\": \"date\", \"formatMask\": \"dd-mm-yyyy\"", "{ \"method\":method, \"type\":'success', 'return':ip } await ws.send(json.dumps(ret)) elif method=='genpro': phd_file", "= {'type':'unknown'} await ws.send(json.dumps(ret)) async def recv_msg(websocket): while True: recv_text", "svalue = list() for r in range(s.nrows): svalue.append( s.row_values(r) )", "\"title\": \"Name\", \"sortable\": True }, { \"name\": \"start\", \"title\": \"Start\",", "\"format\": \"money\", \"show\": True } ], \"data\":[] } return ret", "book = xlrd.open_workbook(name) except: print(\"Open Excel(%s) failed!\" % name) for", "s.name svalue = list() for r in range(s.nrows): svalue.append( s.row_values(r)", "\"Salary\", \"sortable\": True, \"size\": 150, \"format\": \"money\", \"show\": True }", "True, \"size\": 150, \"format\": \"money\", \"show\": True } ], \"data\":[]", "os import asyncio import websockets import json import socket import", "sname = s.name svalue = list() for r in range(s.nrows):", "80 }, { \"name\": \"salary\", \"title\": \"Salary\", \"sortable\": True, \"size\":", "pro_file = msg.get('pro_file') if pro_file: pro_data = read_xls(pro_file) data =", "\"money\", \"show\": True } ], \"data\":[] } return ret async", "\"Name\", \"sortable\": True }, { \"name\": \"start\", \"title\": \"Start\", \"sortable\":", "} return ret async def proc_msg(ws, msg): method = msg.get('method')", "\"type\":'success', 'return':data } await ws.send(json.dumps(ret)) else: ret = {'type':'unknown'} await", "import sys import os import asyncio import websockets import json", "in range(s.nrows): svalue.append( s.row_values(r) ) ctx[i] = (sname, svalue) return", "elif method=='genpro': phd_file = msg.get('phd_file') if phd_file: phd_data = 
read_xls(phd_file)", "s.getsockname()[0] finally: s.close() return ip def read_xls(name): try: book =", "ip=get_host_ip() ret = { \"method\":method, \"type\":'success', 'return':ip } await ws.send(json.dumps(ret))", "return ip def read_xls(name): try: book = xlrd.open_workbook(name) except: print(\"Open", "phd_data = None pro_data = None def get_host_ip(): try: s", "{ \"name\": \"id\", \"title\": \"ID\", \"size\": 50, \"sortable\": True, \"sortDir\":", "= s.getsockname()[0] finally: s.close() return ip def read_xls(name): try: book", "msg.get('method') if method == 'host_ip': ip=get_host_ip() ret = { \"method\":method,", "if phd_file: phd_data = read_xls(phd_file) pro_file = msg.get('pro_file') if pro_file:", "r in range(s.nrows): svalue.append( s.row_values(r) ) ctx[i] = (sname, svalue)", "book.sheet_by_index(i) sname = s.name svalue = list() for r in", "phd_file: phd_data = read_xls(phd_file) pro_file = msg.get('pro_file') if pro_file: pro_data", "= list() for r in range(s.nrows): svalue.append( s.row_values(r) ) ctx[i]", "await ws.send(json.dumps(ret)) async def main_logic(websocket, path): await recv_msg(websocket) port =", "150, \"format\": \"money\", \"show\": True } ], \"data\":[] } return", "else: ret = {'type':'unknown'} await ws.send(json.dumps(ret)) async def recv_msg(websocket): while", "except: ret = {'type':'error'} await ws.send(json.dumps(ret)) async def main_logic(websocket, path):", "method == 'host_ip': ip=get_host_ip() ret = { \"method\":method, \"type\":'success', 'return':ip", "websocket.recv() try: msg = json.loads(recv_text) await proc_msg(websocket, msg) except: ret", "for i in range(book.nsheets): s = book.sheet_by_index(i) sname = s.name", "ret async def proc_msg(ws, msg): method = msg.get('method') if method", "print(\"Open Excel(%s) failed!\" % name) for i in range(book.nsheets): s", "None pro_data = None def get_host_ip(): try: s = socket.socket(socket.AF_INET,", "async def proc_msg(ws, msg): method = msg.get('method') if method ==", 
"finally: s.close() return ip def read_xls(name): try: book = xlrd.open_workbook(name)", "for r in range(s.nrows): svalue.append( s.row_values(r) ) ctx[i] = (sname,", "\"title\": \"Age\", \"sortable\": True, \"size\": 80 }, { \"name\": \"salary\",", "{'type':'unknown'} await ws.send(json.dumps(ret)) async def recv_msg(websocket): while True: recv_text =", "= await websocket.recv() try: msg = json.loads(recv_text) await proc_msg(websocket, msg)", "msg.get('phd_file') if phd_file: phd_data = read_xls(phd_file) pro_file = msg.get('pro_file') if", "try: msg = json.loads(recv_text) await proc_msg(websocket, msg) except: ret =", "\"sortable\": True }, { \"name\": \"start\", \"title\": \"Start\", \"sortable\": True,", "json import socket import xlrd #global vars phd_data = None", "ctx[i] = (sname, svalue) return ctx #生成json def gen_pro(): ret", "\"asc\", \"format\": \"number\" }, { \"name\": \"name\", \"title\": \"Name\", \"sortable\":", "\"dd-mm-yyyy\" }, { \"name\": \"age\", \"title\": \"Age\", \"sortable\": True, \"size\":", "list() for r in range(s.nrows): svalue.append( s.row_values(r) ) ctx[i] =", "in range(book.nsheets): s = book.sheet_by_index(i) sname = s.name svalue =", "\"age\", \"title\": \"Age\", \"sortable\": True, \"size\": 80 }, { \"name\":", "= { \"method\":method, \"type\":'success', 'return':ip } await ws.send(json.dumps(ret)) elif method=='genpro':", "utf-8 import sys import os import asyncio import websockets import", "= msg.get('phd_file') if phd_file: phd_data = read_xls(phd_file) pro_file = msg.get('pro_file')", "ret = { \"header\": [ { \"name\": \"id\", \"title\": \"ID\",", "import websockets import json import socket import xlrd #global vars", "== 'host_ip': ip=get_host_ip() ret = { \"method\":method, \"type\":'success', 'return':ip }", "\"data\":[] } return ret async def proc_msg(ws, msg): method =", "= gen_pro() ret = { \"method\":method, \"type\":'success', 'return':data } await", "pro_data = None def get_host_ip(): try: s = 
socket.socket(socket.AF_INET, socket.SOCK_DGRAM)", "\"sortable\": True, \"sortDir\": \"asc\", \"format\": \"number\" }, { \"name\": \"name\",", "recv_msg(websocket): while True: recv_text = await websocket.recv() try: msg =", "\"number\" }, { \"name\": \"name\", \"title\": \"Name\", \"sortable\": True },", "gen_pro() ret = { \"method\":method, \"type\":'success', 'return':data } await ws.send(json.dumps(ret))", "}, { \"name\": \"age\", \"title\": \"Age\", \"sortable\": True, \"size\": 80", "= read_xls(phd_file) pro_file = msg.get('pro_file') if pro_file: pro_data = read_xls(pro_file)", "await proc_msg(websocket, msg) except: ret = {'type':'error'} await ws.send(json.dumps(ret)) async", "name) for i in range(book.nsheets): s = book.sheet_by_index(i) sname =", "\"method\":method, \"type\":'success', 'return':data } await ws.send(json.dumps(ret)) else: ret = {'type':'unknown'}", "main_logic(websocket, path): await recv_msg(websocket) port = 5678 if len(sys.argv) >=2:", "gen_pro(): ret = { \"header\": [ { \"name\": \"id\", \"title\":", "await recv_msg(websocket) port = 5678 if len(sys.argv) >=2: port =", "xlrd #global vars phd_data = None pro_data = None def", "\"name\": \"age\", \"title\": \"Age\", \"sortable\": True, \"size\": 80 }, {", "asyncio import websockets import json import socket import xlrd #global", "read_xls(name): try: book = xlrd.open_workbook(name) except: print(\"Open Excel(%s) failed!\" %", "= { \"method\":method, \"type\":'success', 'return':data } await ws.send(json.dumps(ret)) else: ret", "\"salary\", \"title\": \"Salary\", \"sortable\": True, \"size\": 150, \"format\": \"money\", \"show\":", "= (sname, svalue) return ctx #生成json def gen_pro(): ret =", "150, \"format\": \"date\", \"formatMask\": \"dd-mm-yyyy\" }, { \"name\": \"age\", \"title\":", "\"Age\", \"sortable\": True, \"size\": 80 }, { \"name\": \"salary\", \"title\":", "\"type\":'success', 'return':ip } await ws.send(json.dumps(ret)) elif method=='genpro': phd_file = msg.get('phd_file')", 
"i in range(book.nsheets): s = book.sheet_by_index(i) sname = s.name svalue", "port = sys.argv[1] ws_server = websockets.serve(main_logic, '0.0.0.0', port) asyncio.get_event_loop().run_until_complete(ws_server) asyncio.get_event_loop().run_forever()", "True, \"size\": 80 }, { \"name\": \"salary\", \"title\": \"Salary\", \"sortable\":", "await ws.send(json.dumps(ret)) else: ret = {'type':'unknown'} await ws.send(json.dumps(ret)) async def", "= read_xls(pro_file) data = gen_pro() ret = { \"method\":method, \"type\":'success',", "= s.name svalue = list() for r in range(s.nrows): svalue.append(", "ip def read_xls(name): try: book = xlrd.open_workbook(name) except: print(\"Open Excel(%s)", "def proc_msg(ws, msg): method = msg.get('method') if method == 'host_ip':", "def read_xls(name): try: book = xlrd.open_workbook(name) except: print(\"Open Excel(%s) failed!\"", "socket.SOCK_DGRAM) s.connect(('192.168.127.12', 65535)) ip = s.getsockname()[0] finally: s.close() return ip", "\"name\": \"start\", \"title\": \"Start\", \"sortable\": True, \"size\": 150, \"format\": \"date\",", "None def get_host_ip(): try: s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(('192.168.127.12', 65535))", "\"sortable\": True, \"size\": 150, \"format\": \"date\", \"formatMask\": \"dd-mm-yyyy\" }, {", "msg.get('pro_file') if pro_file: pro_data = read_xls(pro_file) data = gen_pro() ret", "recv_msg(websocket) port = 5678 if len(sys.argv) >=2: port = sys.argv[1]", "\"title\": \"ID\", \"size\": 50, \"sortable\": True, \"sortDir\": \"asc\", \"format\": \"number\"", "\"title\": \"Salary\", \"sortable\": True, \"size\": 150, \"format\": \"money\", \"show\": True", "{'type':'error'} await ws.send(json.dumps(ret)) async def main_logic(websocket, path): await recv_msg(websocket) port", "len(sys.argv) >=2: port = sys.argv[1] ws_server = websockets.serve(main_logic, '0.0.0.0', port)", "pro_data = read_xls(pro_file) data = gen_pro() ret = { \"method\":method,", "\"date\", \"formatMask\": 
\"dd-mm-yyyy\" }, { \"name\": \"age\", \"title\": \"Age\", \"sortable\":", "True, \"sortDir\": \"asc\", \"format\": \"number\" }, { \"name\": \"name\", \"title\":", "async def recv_msg(websocket): while True: recv_text = await websocket.recv() try:", "await websocket.recv() try: msg = json.loads(recv_text) await proc_msg(websocket, msg) except:", "msg = json.loads(recv_text) await proc_msg(websocket, msg) except: ret = {'type':'error'}", "True } ], \"data\":[] } return ret async def proc_msg(ws,", "\"ID\", \"size\": 50, \"sortable\": True, \"sortDir\": \"asc\", \"format\": \"number\" },", "s.close() return ip def read_xls(name): try: book = xlrd.open_workbook(name) except:", "= socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(('192.168.127.12', 65535)) ip = s.getsockname()[0] finally: s.close()", "True, \"size\": 150, \"format\": \"date\", \"formatMask\": \"dd-mm-yyyy\" }, { \"name\":", "% name) for i in range(book.nsheets): s = book.sheet_by_index(i) sname", "pro_file: pro_data = read_xls(pro_file) data = gen_pro() ret = {", "socket import xlrd #global vars phd_data = None pro_data =", "xlrd.open_workbook(name) except: print(\"Open Excel(%s) failed!\" % name) for i in", "\"start\", \"title\": \"Start\", \"sortable\": True, \"size\": 150, \"format\": \"date\", \"formatMask\":", "'return':ip } await ws.send(json.dumps(ret)) elif method=='genpro': phd_file = msg.get('phd_file') if", "import socket import xlrd #global vars phd_data = None pro_data" ]
[ "symbolicNames:list=None): self.dfa = dfa self.literalNames = literalNames self.symbolicNames = symbolicNames", "str(i-1) def getStateString(self, s:DFAState): n = s.stateNumber baseStateStr = (", "as buf: for s in self.dfa.sortedStates(): n = 0 if", "+ str_list(s.predicates) else: return baseStateStr + \"=>\" + str(s.prediction) else:", "str_list(s.predicates) else: return baseStateStr + \"=>\" + str(s.prediction) else: return", "in range(0, n): t = s.edges[i] if t is not", "class DFASerializer(object): __slots__ = ('dfa', 'literalNames', 'symbolicNames') def __init__(self, dfa:DFA,", "DFAState class DFASerializer(object): __slots__ = ('dfa', 'literalNames', 'symbolicNames') def __init__(self,", "not None and i<=len(self.symbolicNames): return self.symbolicNames[i-1] else: return str(i-1) def", "= s.stateNumber baseStateStr = ( \":\" if s.isAcceptState else \"\")", "import DFA from antlr4.Utils import str_list from antlr4.dfa.DFAState import DFAState", "# A DFA walker that knows how to dump them", "not None and t.stateNumber != 0x7FFFFFFF: buf.write(self.getStateString(s)) label = self.getEdgeLabel(i)", "self.literalNames = literalNames self.symbolicNames = symbolicNames def __str__(self): if self.dfa.s0", "literalNames self.symbolicNames = symbolicNames def __str__(self): if self.dfa.s0 is None:", "from antlr4.Utils import str_list from antlr4.dfa.DFAState import DFAState class DFASerializer(object):", "def __init__(self, dfa:DFA): super().__init__(dfa, None) def getEdgeLabel(self, i:int): return \"'\"", "None and i<=len(self.literalNames): return self.literalNames[i-1] elif self.symbolicNames is not None", "dfa:DFA, literalNames:list=None, symbolicNames:list=None): self.dfa = dfa self.literalNames = literalNames self.symbolicNames", "= self.getEdgeLabel(i) buf.write(\"-\") buf.write(label) buf.write(\"->\") buf.write(self.getStateString(t)) buf.write('\\n') output = buf.getvalue()", "+ ( \"^\" if s.requiresFullContext else \"\") if s.isAcceptState: if", "for i 
in range(0, n): t = s.edges[i] if t", "#/ # A DFA walker that knows how to dump", "in the project root. #/ # A DFA walker that", "for s in self.dfa.sortedStates(): n = 0 if s.edges is", "in the LICENSE.txt file in the project root. #/ #", "None: return None with StringIO() as buf: for s in", "baseStateStr class LexerDFASerializer(DFASerializer): def __init__(self, dfa:DFA): super().__init__(dfa, None) def getEdgeLabel(self,", "if s.isAcceptState: if s.predicates is not None: return baseStateStr +", "('dfa', 'literalNames', 'symbolicNames') def __init__(self, dfa:DFA, literalNames:list=None, symbolicNames:list=None): self.dfa =", "from antlr4 import DFA from antlr4.Utils import str_list from antlr4.dfa.DFAState", "\"EOF\" if self.literalNames is not None and i<=len(self.literalNames): return self.literalNames[i-1]", "to serialized strings.#/ from io import StringIO from antlr4 import", "self.dfa.sortedStates(): n = 0 if s.edges is not None: n", "None else: return output def getEdgeLabel(self, i:int): if i==0: return", "from antlr4.dfa.DFAState import DFAState class DFASerializer(object): __slots__ = ('dfa', 'literalNames',", "knows how to dump them to serialized strings.#/ from io", "io import StringIO from antlr4 import DFA from antlr4.Utils import", "'symbolicNames') def __init__(self, dfa:DFA, literalNames:list=None, symbolicNames:list=None): self.dfa = dfa self.literalNames", "def __init__(self, dfa:DFA, literalNames:list=None, symbolicNames:list=None): self.dfa = dfa self.literalNames =", "not None: n = len(s.edges) for i in range(0, n):", "if self.literalNames is not None and i<=len(self.literalNames): return self.literalNames[i-1] elif", "if self.dfa.s0 is None: return None with StringIO() as buf:", "else: return str(i-1) def getStateString(self, s:DFAState): n = s.stateNumber baseStateStr", "else \"\") + \"s\" + str(n) + ( \"^\" if", "reserved. 
# Use of this file is governed by the", "BSD 3-clause license that # can be found in the", "import str_list from antlr4.dfa.DFAState import DFAState class DFASerializer(object): __slots__ =", "__init__(self, dfa:DFA, literalNames:list=None, symbolicNames:list=None): self.dfa = dfa self.literalNames = literalNames", "None and i<=len(self.symbolicNames): return self.symbolicNames[i-1] else: return str(i-1) def getStateString(self,", "= ( \":\" if s.isAcceptState else \"\") + \"s\" +", "s.edges[i] if t is not None and t.stateNumber != 0x7FFFFFFF:", "antlr4.dfa.DFAState import DFAState class DFASerializer(object): __slots__ = ('dfa', 'literalNames', 'symbolicNames')", "how to dump them to serialized strings.#/ from io import", "+ \"s\" + str(n) + ( \"^\" if s.requiresFullContext else", "return self.literalNames[i-1] elif self.symbolicNames is not None and i<=len(self.symbolicNames): return", "# can be found in the LICENSE.txt file in the", "0 if s.edges is not None: n = len(s.edges) for", "buf.write(\"-\") buf.write(label) buf.write(\"->\") buf.write(self.getStateString(t)) buf.write('\\n') output = buf.getvalue() if len(output)==0:", "the LICENSE.txt file in the project root. 
#/ # A", "n = 0 if s.edges is not None: n =", "i in range(0, n): t = s.edges[i] if t is", "self.getEdgeLabel(i) buf.write(\"-\") buf.write(label) buf.write(\"->\") buf.write(self.getStateString(t)) buf.write('\\n') output = buf.getvalue() if", "is not None: return baseStateStr + \"=>\" + str_list(s.predicates) else:", "return baseStateStr + \"=>\" + str_list(s.predicates) else: return baseStateStr +", "+ str(s.prediction) else: return baseStateStr class LexerDFASerializer(DFASerializer): def __init__(self, dfa:DFA):", "None and t.stateNumber != 0x7FFFFFFF: buf.write(self.getStateString(s)) label = self.getEdgeLabel(i) buf.write(\"-\")", "= ('dfa', 'literalNames', 'symbolicNames') def __init__(self, dfa:DFA, literalNames:list=None, symbolicNames:list=None): self.dfa", "and t.stateNumber != 0x7FFFFFFF: buf.write(self.getStateString(s)) label = self.getEdgeLabel(i) buf.write(\"-\") buf.write(label)", "self.dfa = dfa self.literalNames = literalNames self.symbolicNames = symbolicNames def", "return None with StringIO() as buf: for s in self.dfa.sortedStates():", "found in the LICENSE.txt file in the project root. 
#/", "return self.symbolicNames[i-1] else: return str(i-1) def getStateString(self, s:DFAState): n =", "dump them to serialized strings.#/ from io import StringIO from", "super().__init__(dfa, None) def getEdgeLabel(self, i:int): return \"'\" + chr(i) +", "is not None and t.stateNumber != 0x7FFFFFFF: buf.write(self.getStateString(s)) label =", "s.edges is not None: n = len(s.edges) for i in", "self.symbolicNames is not None and i<=len(self.symbolicNames): return self.symbolicNames[i-1] else: return", "and i<=len(self.literalNames): return self.literalNames[i-1] elif self.symbolicNames is not None and", "buf.write(label) buf.write(\"->\") buf.write(self.getStateString(t)) buf.write('\\n') output = buf.getvalue() if len(output)==0: return", "is governed by the BSD 3-clause license that # can", "them to serialized strings.#/ from io import StringIO from antlr4", "walker that knows how to dump them to serialized strings.#/", "Project. All rights reserved. # Use of this file is", "\":\" if s.isAcceptState else \"\") + \"s\" + str(n) +", "= dfa self.literalNames = literalNames self.symbolicNames = symbolicNames def __str__(self):", "s.requiresFullContext else \"\") if s.isAcceptState: if s.predicates is not None:", "if s.isAcceptState else \"\") + \"s\" + str(n) + (", "t = s.edges[i] if t is not None and t.stateNumber", "i<=len(self.symbolicNames): return self.symbolicNames[i-1] else: return str(i-1) def getStateString(self, s:DFAState): n", "baseStateStr + \"=>\" + str_list(s.predicates) else: return baseStateStr + \"=>\"", "# # Copyright (c) 2012-2017 The ANTLR Project. 
All rights", "s.stateNumber baseStateStr = ( \":\" if s.isAcceptState else \"\") +", "s.isAcceptState else \"\") + \"s\" + str(n) + ( \"^\"", "if s.predicates is not None: return baseStateStr + \"=>\" +", "dfa self.literalNames = literalNames self.symbolicNames = symbolicNames def __str__(self): if", "symbolicNames def __str__(self): if self.dfa.s0 is None: return None with", "( \":\" if s.isAcceptState else \"\") + \"s\" + str(n)", "strings.#/ from io import StringIO from antlr4 import DFA from", "\"\") if s.isAcceptState: if s.predicates is not None: return baseStateStr", "s.predicates is not None: return baseStateStr + \"=>\" + str_list(s.predicates)", "s.isAcceptState: if s.predicates is not None: return baseStateStr + \"=>\"", "else \"\") if s.isAcceptState: if s.predicates is not None: return", "None) def getEdgeLabel(self, i:int): return \"'\" + chr(i) + \"'\"", "__slots__ = ('dfa', 'literalNames', 'symbolicNames') def __init__(self, dfa:DFA, literalNames:list=None, symbolicNames:list=None):", "StringIO() as buf: for s in self.dfa.sortedStates(): n = 0", "2012-2017 The ANTLR Project. All rights reserved. 
# Use of", "that knows how to dump them to serialized strings.#/ from", "buf.getvalue() if len(output)==0: return None else: return output def getEdgeLabel(self,", "if s.requiresFullContext else \"\") if s.isAcceptState: if s.predicates is not", "3-clause license that # can be found in the LICENSE.txt", "buf.write(self.getStateString(s)) label = self.getEdgeLabel(i) buf.write(\"-\") buf.write(label) buf.write(\"->\") buf.write(self.getStateString(t)) buf.write('\\n') output", "antlr4.Utils import str_list from antlr4.dfa.DFAState import DFAState class DFASerializer(object): __slots__", "output def getEdgeLabel(self, i:int): if i==0: return \"EOF\" if self.literalNames", "is not None and i<=len(self.literalNames): return self.literalNames[i-1] elif self.symbolicNames is", "None: n = len(s.edges) for i in range(0, n): t", "else: return output def getEdgeLabel(self, i:int): if i==0: return \"EOF\"", "s:DFAState): n = s.stateNumber baseStateStr = ( \":\" if s.isAcceptState", "the BSD 3-clause license that # can be found in", "\"^\" if s.requiresFullContext else \"\") if s.isAcceptState: if s.predicates is", "None: return baseStateStr + \"=>\" + str_list(s.predicates) else: return baseStateStr", "with StringIO() as buf: for s in self.dfa.sortedStates(): n =", "= symbolicNames def __str__(self): if self.dfa.s0 is None: return None", "i==0: return \"EOF\" if self.literalNames is not None and i<=len(self.literalNames):", "is None: return None with StringIO() as buf: for s", "by the BSD 3-clause license that # can be found", "( \"^\" if s.requiresFullContext else \"\") if s.isAcceptState: if s.predicates", "this file is governed by the BSD 3-clause license that", "= s.edges[i] if t is not None and t.stateNumber !=", "is not None and i<=len(self.symbolicNames): return self.symbolicNames[i-1] else: return str(i-1)", "of this file is governed by the BSD 3-clause license", "LexerDFASerializer(DFASerializer): def __init__(self, dfa:DFA): super().__init__(dfa, None) def 
getEdgeLabel(self, i:int): return", "self.literalNames[i-1] elif self.symbolicNames is not None and i<=len(self.symbolicNames): return self.symbolicNames[i-1]", "antlr4 import DFA from antlr4.Utils import str_list from antlr4.dfa.DFAState import", "\"=>\" + str_list(s.predicates) else: return baseStateStr + \"=>\" + str(s.prediction)", "__init__(self, dfa:DFA): super().__init__(dfa, None) def getEdgeLabel(self, i:int): return \"'\" +", "Copyright (c) 2012-2017 The ANTLR Project. All rights reserved. #", "elif self.symbolicNames is not None and i<=len(self.symbolicNames): return self.symbolicNames[i-1] else:", "literalNames:list=None, symbolicNames:list=None): self.dfa = dfa self.literalNames = literalNames self.symbolicNames =", "+ str(n) + ( \"^\" if s.requiresFullContext else \"\") if", "serialized strings.#/ from io import StringIO from antlr4 import DFA", "self.symbolicNames[i-1] else: return str(i-1) def getStateString(self, s:DFAState): n = s.stateNumber", "n): t = s.edges[i] if t is not None and", "be found in the LICENSE.txt file in the project root.", "+ \"=>\" + str_list(s.predicates) else: return baseStateStr + \"=>\" +", "file in the project root. #/ # A DFA walker", "file is governed by the BSD 3-clause license that #", "if s.edges is not None: n = len(s.edges) for i", "return None else: return output def getEdgeLabel(self, i:int): if i==0:", "\"\") + \"s\" + str(n) + ( \"^\" if s.requiresFullContext", "n = s.stateNumber baseStateStr = ( \":\" if s.isAcceptState else", "StringIO from antlr4 import DFA from antlr4.Utils import str_list from", "return str(i-1) def getStateString(self, s:DFAState): n = s.stateNumber baseStateStr =", "that # can be found in the LICENSE.txt file in", "i:int): if i==0: return \"EOF\" if self.literalNames is not None", "return \"EOF\" if self.literalNames is not None and i<=len(self.literalNames): return", "rights reserved. 
# Use of this file is governed by", "return baseStateStr class LexerDFASerializer(DFASerializer): def __init__(self, dfa:DFA): super().__init__(dfa, None) def", "and i<=len(self.symbolicNames): return self.symbolicNames[i-1] else: return str(i-1) def getStateString(self, s:DFAState):", "else: return baseStateStr class LexerDFASerializer(DFASerializer): def __init__(self, dfa:DFA): super().__init__(dfa, None)", "self.dfa.s0 is None: return None with StringIO() as buf: for", "buf.write(self.getStateString(t)) buf.write('\\n') output = buf.getvalue() if len(output)==0: return None else:", "to dump them to serialized strings.#/ from io import StringIO", "'literalNames', 'symbolicNames') def __init__(self, dfa:DFA, literalNames:list=None, symbolicNames:list=None): self.dfa = dfa", "self.symbolicNames = symbolicNames def __str__(self): if self.dfa.s0 is None: return", "def __str__(self): if self.dfa.s0 is None: return None with StringIO()", "t is not None and t.stateNumber != 0x7FFFFFFF: buf.write(self.getStateString(s)) label", "def getEdgeLabel(self, i:int): if i==0: return \"EOF\" if self.literalNames is", "import StringIO from antlr4 import DFA from antlr4.Utils import str_list", "# Use of this file is governed by the BSD", "\"s\" + str(n) + ( \"^\" if s.requiresFullContext else \"\")", "DFA walker that knows how to dump them to serialized", "if i==0: return \"EOF\" if self.literalNames is not None and", "license that # can be found in the LICENSE.txt file", "not None and i<=len(self.literalNames): return self.literalNames[i-1] elif self.symbolicNames is not", "governed by the BSD 3-clause license that # can be", "def getStateString(self, s:DFAState): n = s.stateNumber baseStateStr = ( \":\"", "class LexerDFASerializer(DFASerializer): def __init__(self, dfa:DFA): super().__init__(dfa, None) def getEdgeLabel(self, i:int):", "import DFAState class DFASerializer(object): __slots__ = ('dfa', 'literalNames', 'symbolicNames') def", "DFASerializer(object): __slots__ = 
('dfa', 'literalNames', 'symbolicNames') def __init__(self, dfa:DFA, literalNames:list=None,", "if t is not None and t.stateNumber != 0x7FFFFFFF: buf.write(self.getStateString(s))", "i<=len(self.literalNames): return self.literalNames[i-1] elif self.symbolicNames is not None and i<=len(self.symbolicNames):", "str(s.prediction) else: return baseStateStr class LexerDFASerializer(DFASerializer): def __init__(self, dfa:DFA): super().__init__(dfa,", "label = self.getEdgeLabel(i) buf.write(\"-\") buf.write(label) buf.write(\"->\") buf.write(self.getStateString(t)) buf.write('\\n') output =", "if len(output)==0: return None else: return output def getEdgeLabel(self, i:int):", "# Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.", "= len(s.edges) for i in range(0, n): t = s.edges[i]", "A DFA walker that knows how to dump them to", "len(s.edges) for i in range(0, n): t = s.edges[i] if", "Use of this file is governed by the BSD 3-clause", "s in self.dfa.sortedStates(): n = 0 if s.edges is not", "= literalNames self.symbolicNames = symbolicNames def __str__(self): if self.dfa.s0 is", "The ANTLR Project. All rights reserved. # Use of this", "+ \"=>\" + str(s.prediction) else: return baseStateStr class LexerDFASerializer(DFASerializer): def", "project root. #/ # A DFA walker that knows how", "range(0, n): t = s.edges[i] if t is not None", "t.stateNumber != 0x7FFFFFFF: buf.write(self.getStateString(s)) label = self.getEdgeLabel(i) buf.write(\"-\") buf.write(label) buf.write(\"->\")", "self.literalNames is not None and i<=len(self.literalNames): return self.literalNames[i-1] elif self.symbolicNames", "can be found in the LICENSE.txt file in the project", "All rights reserved. # Use of this file is governed", "DFA from antlr4.Utils import str_list from antlr4.dfa.DFAState import DFAState class", "buf.write(\"->\") buf.write(self.getStateString(t)) buf.write('\\n') output = buf.getvalue() if len(output)==0: return None", "root. 
#/ # A DFA walker that knows how to", "else: return baseStateStr + \"=>\" + str(s.prediction) else: return baseStateStr", "__str__(self): if self.dfa.s0 is None: return None with StringIO() as", "in self.dfa.sortedStates(): n = 0 if s.edges is not None:", "buf.write('\\n') output = buf.getvalue() if len(output)==0: return None else: return", "\"=>\" + str(s.prediction) else: return baseStateStr class LexerDFASerializer(DFASerializer): def __init__(self,", "str_list from antlr4.dfa.DFAState import DFAState class DFASerializer(object): __slots__ = ('dfa',", "= buf.getvalue() if len(output)==0: return None else: return output def", "LICENSE.txt file in the project root. #/ # A DFA", "None with StringIO() as buf: for s in self.dfa.sortedStates(): n", "baseStateStr = ( \":\" if s.isAcceptState else \"\") + \"s\"", "str(n) + ( \"^\" if s.requiresFullContext else \"\") if s.isAcceptState:", "return output def getEdgeLabel(self, i:int): if i==0: return \"EOF\" if", "n = len(s.edges) for i in range(0, n): t =", "output = buf.getvalue() if len(output)==0: return None else: return output", "not None: return baseStateStr + \"=>\" + str_list(s.predicates) else: return", "return baseStateStr + \"=>\" + str(s.prediction) else: return baseStateStr class", "is not None: n = len(s.edges) for i in range(0,", "the project root. #/ # A DFA walker that knows", "!= 0x7FFFFFFF: buf.write(self.getStateString(s)) label = self.getEdgeLabel(i) buf.write(\"-\") buf.write(label) buf.write(\"->\") buf.write(self.getStateString(t))", "baseStateStr + \"=>\" + str(s.prediction) else: return baseStateStr class LexerDFASerializer(DFASerializer):", "= 0 if s.edges is not None: n = len(s.edges)", "(c) 2012-2017 The ANTLR Project. All rights reserved. 
# Use", "buf: for s in self.dfa.sortedStates(): n = 0 if s.edges", "dfa:DFA): super().__init__(dfa, None) def getEdgeLabel(self, i:int): return \"'\" + chr(i)", "0x7FFFFFFF: buf.write(self.getStateString(s)) label = self.getEdgeLabel(i) buf.write(\"-\") buf.write(label) buf.write(\"->\") buf.write(self.getStateString(t)) buf.write('\\n')", "from io import StringIO from antlr4 import DFA from antlr4.Utils", "getStateString(self, s:DFAState): n = s.stateNumber baseStateStr = ( \":\" if", "len(output)==0: return None else: return output def getEdgeLabel(self, i:int): if", "getEdgeLabel(self, i:int): if i==0: return \"EOF\" if self.literalNames is not", "ANTLR Project. All rights reserved. # Use of this file" ]
[ "while time.time() < start_time + time_limit: state = env.step(action) action,", "[0, 360] print(f\"Hues are: {detected_hues}\") print(f\"Hue calibrated: {avg_hue:0.2f}\") print(f\"Avg hue:", "= wait_for_joystick_or_menu(hardware) if buttons.menu_button: # Early quit hardware.go_up() return hardware.display(\"Calibrating...\")", "servos. servos = np.array(plate_angles_to_servo_positions(*action)) servos_zeroed = np.array(plate_angles_to_servo_positions(0, 0)) servo_offsets =", "_ = hardware.get_buttons() if menu or joy: break env.hardware.go_up() return", "or the menu. Returns the buttons\"\"\" while True: buttons =", "start_time + time_limit: state = env.step(action) action, info = pid_fn(state)", "with uncalibrated servos hardware.servo_offsets = (0, 0, 0) # lift", "if buttons.menu_button: return CalibServos(early_quit=True) if ball_detected: vel_x_hist.append(vel_x) vel_y_hist.append(vel_y) prev_100_x =", "== \"__main__\": # Parse command line args parser = argparse.ArgumentParser()", "return CalibPos() def calibrate_servo_offsets(pid_fn, env, stationary_vel=0.005, time_limit=20): start_time = time.time()", "return # Calibrate position pos_calib = calibrate_pos(camera_fn, detector_fn, hue_calib.hue, is_menu_down)", "Have a nice filename with the time and whether it", "offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\") else: hardware.display( \"Calibration\\nfailed\\n\\nClick menu\\nto return...\", scrolling=True", "import dataclass, astuple from hardware import plate_angles_to_servo_positions @dataclass class CalibHue:", "indent=4, sort_keys=True) def read_calibration(calibration_file=\"bot.json\"): log.info(\"Reading previous calibration.\") if os.path.isfile(calibration_file): with", "=\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\") else: hardware.display( \"Calibration\\nfailed\\n\\nClick menu\\nto return...\", scrolling=True )", "time_limit seconds, quit log.warning(f\"Servo calibration failed.\") return CalibServos() def write_calibration(calibration_dict,", "later) 
vel_x_hist = [1.0 for _ in range(100)] vel_y_hist =", "camera # sees (useful for debugging when the hue calibration", "start_time = time.time() action = Vector2(0, 0) # Initial high", "up fist hardware.set_angles(0, 0) # Calibrate servo offsets hardware.display( \"Calibarating\\nservos\\n\\n\"", "stand.\\n\\n\" \"Click joystick\\nto continue.\", scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware) if", "0) # Calibrate servo offsets hardware.display( \"Calibarating\\nservos\\n\\n\" \"Place ball in\\ncenter", "controllers import pid_controller from dataclasses import dataclass, astuple from hardware", "= 0 hue_high = 360 hue_steps = 41 # Is", "hue_calib.success else \"Hue calib:\\nfailed\\n\\n\" ) pos_str = ( f\"Position \\ncalib:\\nsuccessful\\nPosition", "small return ( np.abs(x) < max_ball_dist and np.abs(y) < max_ball_dist", "of 40 so that the steps are even img_frame, elapsed_time", "avg_hue_rad = np.arctan2(sin_mean, cos_mean) avg_hue = np.degrees(avg_hue_rad) % 360 #", "vel_x_hist = [1.0 for _ in range(100)] vel_y_hist = [1.0", "hardware.go_up() return hardware.display(\"Calibrating\\nservos...\", scrolling=True) servo_calib = calibrate_servo_offsets(pid_fn, env) # Save", "calibration_dict = read_calibration(calibration_file) calibration_dict[\"servo_offsets\"] = servo_calib.servos s1, s2, s3 =", "previous calibration.\") if os.path.isfile(calibration_file): with open(calibration_file, \"r\") as f: calibration_dict", "Vector2 from detector import hsv_detector from controllers import pid_controller from", "action=\"store_true\") parser.add_argument(\"-f\", \"--file\", default=\"bot.json\", type=str) args, _ = parser.parse_known_args() main(args.file,", "failed.\") return CalibPos() def calibrate_servo_offsets(pid_fn, env, stationary_vel=0.005, time_limit=20): start_time =", "from common import Vector2 from detector import hsv_detector from controllers", "hue_calib = calibrate_hue(camera_fn, detector_fn, is_menu_down) if hue_calib.early_quit: 
hardware.go_up() return #", "env.step((0, 0)) time.sleep(0.2) env.hardware.enable_servos() time.sleep(0.2) env.hardware.set_servos(133, 133, 133) run_calibration(env, pid_fn,", "Tuple from common import Vector2 from detector import hsv_detector from", "= detector_fn(img_frame, hue=hue) # If we found a ball roughly", "env: env.step((0, 0)) time.sleep(0.2) env.hardware.enable_servos() time.sleep(0.2) env.hardware.set_servos(133, 133, 133) run_calibration(env,", "= round(y, 3) log.info(f\"Offset calibrated: [{x_offset:.3f}, {y_offset:.3f}]\") return CalibPos(position=(x_offset, y_offset),", "Reasonable default success: bool = False early_quit: bool = False", "the cache. TODO: added this while searching for a state", "f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\\n\\n\" \"Click menu\\nto return...\\n\", scrolling=True, )", "env.step(action) action, info = pid_fn(state) (x, y, vel_x, vel_y, sum_x,", "under the MIT License. \"\"\" Calibration Controller Performs calibration for", "for a state bug detector_fn(img_frame, hue=hue_calib.hue + 1, debug=True, filename=filename)", "+ time_limit: state = env.step(action) action, info = pid_fn(state) (x,", "position: Tuple[float, float] = (0.0, 0.0) success: bool = False", "\\n({100*x_offset:.1f}, {100*y_offset:.1f})cm\\n\\n\" if hue_calib.success else \"(X, Y) calib:\\nfailed\\n\\n\" ) hardware.display(", "\"r\") as f: calibration_dict = json.load(f) else: # Use defaults", "menu. 
Returns the buttons\"\"\" while True: buttons = hardware.get_buttons() if", "calibration file {calibration_file}\") json.dump(calibration_dict, outfile, indent=4, sort_keys=True) def read_calibration(calibration_file=\"bot.json\"): log.info(\"Reading", "detector_fn, is_menu_down) if hue_calib.early_quit: hardware.go_up() return # Calibrate position pos_calib", "success: bool = False early_quit: bool = False # If", "hardware.detector menu_button = False while not menu_button: img_frame, _ =", "< stationary_vel): # Calculate offsets by calculating servo positions at", "ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue, debug=True) # If", "calibrate_servo_offsets(pid_fn, env, stationary_vel=0.005, time_limit=20): start_time = time.time() action = Vector2(0,", "(0.0, 0.0, 0.0), } return calibration_dict def wait_for_joystick_or_menu(hardware, sleep_time=1 /", "as f: calibration_dict = json.load(f) else: # Use defaults calibration_dict", "detected_hues = [] for hue in hue_options: if is_menu_down_fn(): return", "y, radius, max_ball_dist=0.045, min_ball_dist=0.01): # reject balls which are too", "we found a ball roughly in the center that is", "buttons.menu_button or buttons.joy_button: return buttons time.sleep(sleep_time) def wait_for_menu(hardware, sleep_time=1 /", "> 0: # https://en.wikipedia.org/wiki/Mean_of_circular_quantities detected_hues_rad = np.radians(detected_hues) sines, cosines =", "import Vector2 from detector import hsv_detector from controllers import pid_controller", "if ball_detected and ball_close_enough(x, y, radius): log.info( f\"hue={hue:0.3f}, ball_detected={ball_detected}, \"", "= pid_fn(state) (x, y, vel_x, vel_y, sum_x, sum_y), ball_detected, buttons", "env to be able to stream the calib results env", "joy: break env.hardware.go_up() return wait_for_menu_and_stream def main(calibration_file, frequency=30, debug=True): pid_fn", "doesn't currently give a good calibration raise NotImplementedError # Get", "detector_fn, 
hue_calib.hue, is_menu_down) if pos_calib.early_quit: hardware.go_up() return # Save calibration", "and (prev_100_y < stationary_vel): # Calculate offsets by calculating servo", "hardware.camera detector_fn = hardware.detector def is_menu_down(hardware=hardware) -> bool: return hardware.get_buttons().menu_button", "ball_close_enough(x, y, radius): x_offset = round(x, 3) y_offset = round(y,", "# Warning! This mutates the state! hardware.reset_calibration(calibration_file=calibration_file) if pos_calib.success and", "0 hue_high = 360 hue_steps = 41 # Is 41", "menu_button: img_frame, _ = camera_fn() detector_fn(img_frame, debug=True) # Save to", "y, vel_x, vel_y, sum_x, sum_y), ball_detected, buttons = state #", "return hardware.get_buttons().menu_button # lift plate up first hardware.set_angles(0, 0) #", "= servo_calib.servos s1, s2, s3 = servo_calib.servos write_calibration(calibration_dict) # Update", "ball_detected, buttons = state # Quit on menu down if", "with the time and whether it succeeded or failed time_of_day", "__iter__(self): return iter(astuple(self)) @dataclass class CalibServos: servos: Tuple[float, float, float]", "in range(100)] # Run until the ball has stabilized or", "else: filename += f\".fail.{time_of_day}.jpg\" img_frame, _ = camera_fn() # Huemask", "41 instead of 40 so that the steps are even", "while not menu_button: img_frame, _ = camera_fn() detector_fn(img_frame, debug=True) #", "stream the calib results env = kwargs[\"env\"] hardware = env.hardware", "0.0, 0.0), } return calibration_dict def wait_for_joystick_or_menu(hardware, sleep_time=1 / 30):", "= read_calibration(calibration_file) calibration_dict[\"ball_hue\"] = hue_calib.hue calibration_dict[\"plate_offsets\"] = pos_calib.position x_offset, y_offset", "sum_y), ball_detected, buttons = state # Quit on menu down", "f\"hue={hue:0.3f}, ball_detected={ball_detected}, \" f\"(x, y)={x:0.3f} {y:0.3f}, radius={radius:0.3f}\" ) detected_hues.append(hue) if", "_ in range(100)] vel_y_hist = 
[1.0 for _ in range(100)]", "debugging when the hue calibration fails) # Have a nice", "the menu. Returns the buttons\"\"\" while True: buttons = hardware.get_buttons()", "parser = argparse.ArgumentParser() parser.add_argument(\"-d\", \"--debug\", action=\"store_true\") parser.add_argument(\"-f\", \"--file\", default=\"bot.json\", type=str)", "positions at the # current stable position and subtracting the", "calibration raise NotImplementedError # Get some hidden things from env", "\"w+\") as outfile: log.info(f\"Creating calibration file {calibration_file}\") json.dump(calibration_dict, outfile, indent=4,", "or the time limit was reached while time.time() < start_time", "a new hue (hue + 1) invalidates # the cache.", "np.arctan2(sin_mean, cos_mean) avg_hue = np.degrees(avg_hue_rad) % 360 # Convert back", "\"Place ball in\\ncenter using\\nclear stand.\\n\\n\" \"Click joystick\\nwhen ready.\" scrolling=True, )", "Controller Performs calibration for hue, center of camera position, and", "10 frames before giving up if is_menu_down_fn(): return CalibPos(early_quit=True) img_frame,", "if __name__ == \"__main__\": # Parse command line args parser", "hardware import plate_angles_to_servo_positions @dataclass class CalibHue: hue: int = 44", "[1.0 for _ in range(100)] # Run until the ball", "= round(x, 3) y_offset = round(y, 3) log.info(f\"Offset calibrated: [{x_offset:.3f},", "f\".fail.{time_of_day}.jpg\" img_frame, _ = camera_fn() # Huemask keeps an internal", "float] = (0.0, 0.0) success: bool = False early_quit: bool", "vel_y, sum_x, sum_y), ball_detected, buttons = state # Quit on", "# Calculate offsets by calculating servo positions at the #", "args parser = argparse.ArgumentParser() parser.add_argument(\"-d\", \"--debug\", action=\"store_true\") parser.add_argument(\"-f\", \"--file\", default=\"bot.json\",", "from the center and too small return ( np.abs(x) <", "menu...\", scrolling=True) elif not (pos_calib.success or hue_calib.success): # or servo_calib.success):", 
"typing import Tuple from common import Vector2 from detector import", "License. \"\"\" Calibration Controller Performs calibration for hue, center of", "vel_history (to use the vel_hist[-100:] later) vel_x_hist = [1.0 for", "scrolling=True, ) # When the calibration is complete, save the", "Is 41 instead of 40 so that the steps are", "sees (useful for debugging when the hue calibration fails) #", "detector_fn(img_frame, debug=True) # Save to streaming menu, joy, _, _", "keeps an internal cache. By sending a new hue (hue", "to streaming menu, joy, _, _ = hardware.get_buttons() if menu", "hardware.detector def is_menu_down(hardware=hardware) -> bool: return hardware.get_buttons().menu_button # lift plate", "quit hardware.go_up() return hardware.display(\"Calibrating...\") hue_calib = calibrate_hue(camera_fn, detector_fn, is_menu_down) if", "and hue_calib.success: # and servo_calib.success: hardware.display(f\"Ok! Ball hue={hue_calib.hue}\\nClick menu...\", scrolling=True)", "the calibration is complete def __iter__(self): return iter(astuple(self)) @dataclass class", "radius={radius:0.3f}\" ) detected_hues.append(hue) if len(detected_hues) > 0: # https://en.wikipedia.org/wiki/Mean_of_circular_quantities detected_hues_rad", "ball_detected and ball_close_enough(x, y, radius): log.info( f\"hue={hue:0.3f}, ball_detected={ball_detected}, \" f\"(x,", "This mutates the state! 
hardware.reset_calibration(calibration_file=calibration_file) if pos_calib.success and hue_calib.success: #", "= {hue_calib.hue}\\n\\n\" if hue_calib.success else \"Hue calib:\\nfailed\\n\\n\" ) pos_str =", "calibrated: [{x_offset:.3f}, {y_offset:.3f}]\") return CalibPos(position=(x_offset, y_offset), success=True) log.warning(f\"Offset calibration failed.\")", "action, info = pid_fn(state) (x, y, vel_x, vel_y, sum_x, sum_y),", "ball roughly in the center that is large enough if", "joystick\\nwhen ready.\" scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware) if buttons.menu_button: #", "detect for 10 frames before giving up if is_menu_down_fn(): return", "offsets \"\"\" import os import cv2 import time import json", "list(servos - servos_zeroed) return CalibServos(servos=servo_offsets, success=True) # If the plate", "= np.sin(detected_hues_rad), np.cos(detected_hues_rad) sin_mean, cos_mean = np.mean(sines), np.mean(cosines) avg_hue_rad =", "the calibration is complete, save the image of what the", "quit log.warning(f\"Servo calibration failed.\") return CalibServos() def write_calibration(calibration_dict, calibration_file=\"bot.json\"): log.info(\"Writing", "calibration is complete, save the image of what the moab", "def is_menu_down(hardware=hardware) -> bool: return hardware.get_buttons().menu_button # lift plate up", "# Calibrate servo offsets hardware.display( \"Calibarating\\nservos\\n\\n\" \"Place ball in\\ncenter without\\n", "< start_time + time_limit: state = env.step(action) action, info =", "else \"Hue calib:\\nfailed\\n\\n\" ) pos_str = ( f\"Position \\ncalib:\\nsuccessful\\nPosition =", "the center that is large enough if ball_detected and ball_close_enough(x,", "= np.radians(detected_hues) sines, cosines = np.sin(detected_hues_rad), np.cos(detected_hues_rad) sin_mean, cos_mean =", "complete, save the image of what the moab camera #", "print(f\"Hue calibrated: {avg_hue:0.2f}\") print(f\"Avg hue: {avg_hue:0.2f}\") return 
CalibHue(hue=int(avg_hue), success=True) else:", "_ in range(100)] # Run until the ball has stabilized", "# Quit on menu down if buttons.menu_button: return CalibServos(early_quit=True) if", "class CalibPos: position: Tuple[float, float] = (0.0, 0.0) success: bool", "= (0.0, 0.0, 0.0) success: bool = False early_quit: bool", "If the plate could be stabilized in time_limit seconds, quit", "(x, y, vel_x, vel_y, sum_x, sum_y), ball_detected, buttons = state", "env hardware = env.hardware camera_fn = hardware.camera detector_fn = hardware.detector", "the state! hardware.reset_calibration(calibration_file=calibration_file) if pos_calib.success and hue_calib.success: # and servo_calib.success:", "= datetime.datetime.now().strftime(\"%H%M%S\") filename = \"/tmp/hue\" if hue_calib.success: filename += f\".{hue_calib.hue}.{time_of_day}.jpg\"", "{s3:.2f})\\n\\n\" \"Click menu\\nto return...\\n\", scrolling=True, ) print(f\"servo offsets =\\n({s1:.2f}, {s2:.2f},", "parser.add_argument(\"-f\", \"--file\", default=\"bot.json\", type=str) args, _ = parser.parse_known_args() main(args.file, debug=args.debug)", "if buttons.menu_button: # Early quit hardware.go_up() return hardware.display(\"Calibrating...\") hue_calib =", "the new calibration # Warning! This mutates the state! 
hardware.reset_calibration(calibration_file=calibration_file)", "pid_controller from dataclasses import dataclass, astuple from hardware import plate_angles_to_servo_positions", "and wait for joystick hardware.display( \"put ball on stand\\nclick joystick\",", "# Start the calibration with uncalibrated servos hardware.servo_offsets = (0,", "= np.array(plate_angles_to_servo_positions(*action)) servos_zeroed = np.array(plate_angles_to_servo_positions(0, 0)) servo_offsets = list(servos -", "early_quit: bool = False # If menu is pressed before", "position pos_calib = calibrate_pos(camera_fn, detector_fn, hue_calib.hue, is_menu_down) if pos_calib.early_quit: hardware.go_up()", "import datetime import numpy as np import logging as log", "and too small return ( np.abs(x) < max_ball_dist and np.abs(y)", "hue_options = list(np.linspace(hue_low, hue_high, hue_steps)) detected_hues = [] for hue", "pos_calib.position x_offset, y_offset = pos_calib.position write_calibration(calibration_dict) # Update the environment", "is pressed before the calibration is complete def __iter__(self): return", "0) # Initial high vel_history (to use the vel_hist[-100:] later)", "False early_quit: bool = False # If menu is pressed", "0: # https://en.wikipedia.org/wiki/Mean_of_circular_quantities detected_hues_rad = np.radians(detected_hues) sines, cosines = np.sin(detected_hues_rad),", "hardware.detector # Start the calibration with uncalibrated servos hardware.servo_offsets =", "return...\\n\", scrolling=True, ) # When the calibration is complete, save", "joy_y = hardware.get_buttons() time.sleep(sleep_time) if menu_button: return def run_calibration(env, pid_fn,", "def write_calibration(calibration_dict, calibration_file=\"bot.json\"): log.info(\"Writing calibration.\") # write out stuff with", "sines, cosines = np.sin(detected_hues_rad), np.cos(detected_hues_rad) sin_mean, cos_mean = np.mean(sines), np.mean(cosines)", "detector_fn = hardware.detector menu_button = False while not menu_button: 
img_frame,", "# Huemask keeps an internal cache. By sending a new", "position, and servo offsets \"\"\" import os import cv2 import", "TODO: added this while searching for a state bug detector_fn(img_frame,", "env.hardware.enable_servos() time.sleep(0.2) env.hardware.set_servos(133, 133, 133) run_calibration(env, pid_fn, calibration_file) env.hardware.disable_servos() if", "stationary_vel) and (prev_100_y < stationary_vel): # Calculate offsets by calculating", "to use the new calibration # Warning! This mutates the", "False # If menu is pressed before the calibration is", "pid_fn = pid_controller(frequency=frequency) with MoabEnv(frequency=frequency, debug=debug) as env: env.step((0, 0))", "41 # Is 41 instead of 40 so that the", "camera_fn() detector_fn(img_frame, debug=True) # Save to streaming menu, joy, _,", "elapsed_time = camera_fn() ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue)", "the moab camera # sees (useful for debugging when the", "has stabilized or the time limit was reached while time.time()", "If the average velocity for the last 100 timesteps is", "s3 = servo_calib.servos write_calibration(calibration_dict) # Update the environment to use", "CalibServos(servos=servo_offsets, success=True) # If the plate could be stabilized in", "= (0, 0, 0) # lift plate up fist hardware.set_angles(0,", "hue_steps = 41 # Is 41 instead of 40 so", "calibration failed.\") return CalibServos() def write_calibration(calibration_dict, calibration_file=\"bot.json\"): log.info(\"Writing calibration.\") #", "time_limit=20): start_time = time.time() action = Vector2(0, 0) # Initial", "import pid_controller from dataclasses import dataclass, astuple from hardware import", "for joystick hardware.display( \"put ball on stand\\nclick joystick\", # \"Place", "state bug detector_fn(img_frame, hue=hue_calib.hue + 1, debug=True, filename=filename) hardware.go_up() def", "env = kwargs[\"env\"] hardware = env.hardware camera_fn = hardware.camera detector_fn", 
"position and subtracting the `default` zeroed # position of the", "hardware.display(\"Calibration failed\\nClick menu...\", scrolling=True) else: hue_str = ( f\"Hue calib:\\nsuccessful\\nBall", "} return calibration_dict def wait_for_joystick_or_menu(hardware, sleep_time=1 / 30): \"\"\"Waits for", "% 360 # Convert back to [0, 360] print(f\"Hues are:", "return CalibPos(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected, ((x, y), radius)", "Warning! This mutates the state! hardware.reset_calibration(calibration_file=calibration_file) if pos_calib.success and hue_calib.success:", "Calibrate position pos_calib = calibrate_pos(camera_fn, detector_fn, hue_calib.hue, is_menu_down) if pos_calib.early_quit:", "import logging as log from env import MoabEnv from typing", "buttons.menu_button: return CalibServos(early_quit=True) if ball_detected: vel_x_hist.append(vel_x) vel_y_hist.append(vel_y) prev_100_x = np.mean(np.abs(vel_x_hist[-100:]))", "plate_angles_to_servo_positions @dataclass class CalibHue: hue: int = 44 # Reasonable", "{avg_hue:0.2f}\") print(f\"Avg hue: {avg_hue:0.2f}\") return CalibHue(hue=int(avg_hue), success=True) else: log.warning(f\"Hue calibration", "https://en.wikipedia.org/wiki/Mean_of_circular_quantities detected_hues_rad = np.radians(detected_hues) sines, cosines = np.sin(detected_hues_rad), np.cos(detected_hues_rad) sin_mean,", "cache. TODO: added this while searching for a state bug", "when the hue calibration fails) # Have a nice filename", "the calibration with uncalibrated servos hardware.servo_offsets = (0, 0, 0)", "MIT License. 
\"\"\" Calibration Controller Performs calibration for hue, center", "return iter(astuple(self)) @dataclass class CalibPos: position: Tuple[float, float] = (0.0,", "133, 133) run_calibration(env, pid_fn, calibration_file) env.hardware.disable_servos() if __name__ == \"__main__\":", "import cv2 import time import json import argparse import datetime", "balls which are too far from the center and too", "Ball hue={hue_calib.hue}\\nClick menu...\", scrolling=True) elif not (pos_calib.success or hue_calib.success): #", "MoabEnv from typing import Tuple from common import Vector2 from", "for the last 100 timesteps is under the limit if", "\"\"\"Waits for either the joystick or the menu. Returns the", "print(f\"Hues are: {detected_hues}\") print(f\"Hue calibrated: {avg_hue:0.2f}\") print(f\"Avg hue: {avg_hue:0.2f}\") return", "else: # Use defaults calibration_dict = { \"ball_hue\": 44, \"plate_offsets\":", "hardware.get_buttons() time.sleep(sleep_time) if menu_button: return def run_calibration(env, pid_fn, calibration_file): #", "= (0.0, 0.0) success: bool = False early_quit: bool =", "hardware.display(\"Calibrating...\") hue_calib = calibrate_hue(camera_fn, detector_fn, is_menu_down) if hue_calib.early_quit: hardware.go_up() return", "img_frame, _ = camera_fn() # Huemask keeps an internal cache.", "limit if (prev_100_x < stationary_vel) and (prev_100_y < stationary_vel): #", "{s3:.2f})\") else: hardware.display( \"Calibration\\nfailed\\n\\nClick menu\\nto return...\", scrolling=True ) hardware.go_up() def", "in the center that is large enough if ball_detected and", "[1.0 for _ in range(100)] vel_y_hist = [1.0 for _", "lift plate up fist hardware.set_angles(0, 0) # Calibrate servo offsets", "camera_fn = hardware.camera detector_fn = hardware.detector menu_button = False while", "an internal cache. 
By sending a new hue (hue +", "time_limit: state = env.step(action) action, info = pid_fn(state) (x, y,", "servo_calib.servos write_calibration(calibration_dict) # Update the environment to use the new", "run_calibration( kwargs[\"env\"], kwargs[\"pid_fn\"], kwargs[\"calibration_file\"], ) def wait_for_menu_and_stream(): # Get some", "is_menu_down(hardware=hardware) -> bool: return hardware.get_buttons().menu_button # lift plate up first", "what the moab camera # sees (useful for debugging when", "with open(calibration_file, \"w+\") as outfile: log.info(f\"Creating calibration file {calibration_file}\") json.dump(calibration_dict,", "if ball_detected and ball_close_enough(x, y, radius): x_offset = round(x, 3)", "def calibrate_pos(camera_fn, detector_fn, hue, is_menu_down_fn): for i in range(10): #", "0.0, 0.0) success: bool = False early_quit: bool = False", "break env.hardware.go_up() return wait_for_menu_and_stream def main(calibration_file, frequency=30, debug=True): pid_fn =", "menu_button: return def run_calibration(env, pid_fn, calibration_file): # Get some hidden", "# reject balls which are too far from the center", "the average velocity for the last 100 timesteps is under", "ball on stand\\nclick joystick\", # \"Place ball in\\ncenter using\\nclear stand.\\n\\n\"", "return iter(astuple(self)) def ball_close_enough(x, y, radius, max_ball_dist=0.045, min_ball_dist=0.01): # reject", "for _ in range(100)] vel_y_hist = [1.0 for _ in", "\"Click menu\\nto return...\\n\", scrolling=True, ) # When the calibration is", "CalibPos: position: Tuple[float, float] = (0.0, 0.0) success: bool =", "else: hue_str = ( f\"Hue calib:\\nsuccessful\\nBall hue = {hue_calib.hue}\\n\\n\" if", "either the joystick or the menu. 
Returns the buttons\"\"\" while", "is complete def __iter__(self): return iter(astuple(self)) @dataclass class CalibPos: position:", "hue_calib.hue, is_menu_down) if pos_calib.early_quit: hardware.go_up() return # Save calibration calibration_dict", "hardware.get_buttons() if menu or joy: break env.hardware.go_up() return wait_for_menu_and_stream def", "sort_keys=True) def read_calibration(calibration_file=\"bot.json\"): log.info(\"Reading previous calibration.\") if os.path.isfile(calibration_file): with open(calibration_file,", "= camera_fn() detector_fn(img_frame, debug=True) # Save to streaming menu, joy,", "vel_y_hist.append(vel_y) prev_100_x = np.mean(np.abs(vel_x_hist[-100:])) prev_100_y = np.mean(np.abs(vel_y_hist[-100:])) print(\"Prev 100: \",", "hardware.display( \"put ball on stand\\nclick joystick\", # \"Place ball in\\ncenter", "Licensed under the MIT License. \"\"\" Calibration Controller Performs calibration", "\"\"\" Calibration Controller Performs calibration for hue, center of camera", "0)) servo_offsets = list(servos - servos_zeroed) return CalibServos(servos=servo_offsets, success=True) #", "detector_fn = hardware.detector # Start the calibration with uncalibrated servos", "\"Click menu\\nto return...\\n\", scrolling=True, ) print(f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\")", "\", (prev_100_x, prev_100_y)) # If the average velocity for the", "f: calibration_dict = json.load(f) else: # Use defaults calibration_dict =", "datetime.datetime.now().strftime(\"%H%M%S\") filename = \"/tmp/hue\" if hue_calib.success: filename += f\".{hue_calib.hue}.{time_of_day}.jpg\" else:", "ball_detected and ball_close_enough(x, y, radius): x_offset = round(x, 3) y_offset", "the `default` zeroed # position of the servos. 
servos =", "CalibPos(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected, ((x, y), radius) =", "i in range(10): # Try and detect for 10 frames", "hardware.get_buttons() if buttons.menu_button or buttons.joy_button: return buttons time.sleep(sleep_time) def wait_for_menu(hardware,", "stationary_vel=0.005, time_limit=20): start_time = time.time() action = Vector2(0, 0) #", "\"ball_hue\": 44, \"plate_offsets\": (0.0, 0.0), \"servo_offsets\": (0.0, 0.0, 0.0), }", "def run_calibration(env, pid_fn, calibration_file): # Get some hidden things from", "= kwargs[\"env\"] hardware = env.hardware camera_fn = hardware.camera detector_fn =", "calibration # Warning! This mutates the state! hardware.reset_calibration(calibration_file=calibration_file) if pos_calib.success", "common import Vector2 from detector import hsv_detector from controllers import", "as log from env import MoabEnv from typing import Tuple", "subtracting the `default` zeroed # position of the servos. servos", "< max_ball_dist and radius > min_ball_dist ) def calibrate_hue(camera_fn, detector_fn,", "# If the average velocity for the last 100 timesteps", "max_ball_dist and np.abs(y) < max_ball_dist and radius > min_ball_dist )", "else \"(X, Y) calib:\\nfailed\\n\\n\" ) hardware.display( \"Calibration\\npartially succeeded\\n\\n\" + hue_str", "state! 
env.reset_calibration(calibration_file=calibration_file) if servo_calib.success: hardware.display( f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\\n\\n\"", "reached while time.time() < start_time + time_limit: state = env.step(action)", "is under the limit if (prev_100_x < stationary_vel) and (prev_100_y", "+= f\".{hue_calib.hue}.{time_of_day}.jpg\" else: filename += f\".fail.{time_of_day}.jpg\" img_frame, _ = camera_fn()", "message and wait for joystick hardware.display( \"put ball on stand\\nclick", "return wait_for_menu_and_stream def main(calibration_file, frequency=30, debug=True): pid_fn = pid_controller(frequency=frequency) with", "calibration_file): # Get some hidden things from env hardware =", "calibration failed.\") return CalibHue() def calibrate_pos(camera_fn, detector_fn, hue, is_menu_down_fn): for", "env import MoabEnv from typing import Tuple from common import", "np.cos(detected_hues_rad) sin_mean, cos_mean = np.mean(sines), np.mean(cosines) avg_hue_rad = np.arctan2(sin_mean, cos_mean)", "servo_calib.success: hardware.display( f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\\n\\n\" \"Click menu\\nto return...\\n\",", "pid_fn, calibration_file) env.hardware.disable_servos() if __name__ == \"__main__\": # Parse command", "Huemask keeps an internal cache. 
By sending a new hue", "timesteps is under the limit if (prev_100_x < stationary_vel) and", "3) y_offset = round(y, 3) log.info(f\"Offset calibrated: [{x_offset:.3f}, {y_offset:.3f}]\") return", "time limit was reached while time.time() < start_time + time_limit:", "calibration_dict = { \"ball_hue\": 44, \"plate_offsets\": (0.0, 0.0), \"servo_offsets\": (0.0,", "frequency=30, debug=True): pid_fn = pid_controller(frequency=frequency) with MoabEnv(frequency=frequency, debug=debug) as env:", "# Get some hidden things from env hardware = env.hardware", "np.mean(cosines) avg_hue_rad = np.arctan2(sin_mean, cos_mean) avg_hue = np.degrees(avg_hue_rad) % 360", "is_menu_down_fn): for i in range(10): # Try and detect for", "filename = \"/tmp/hue\" if hue_calib.success: filename += f\".{hue_calib.hue}.{time_of_day}.jpg\" else: filename", "Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"ball_hue\"] = hue_calib.hue calibration_dict[\"plate_offsets\"] =", "in range(100)] vel_y_hist = [1.0 for _ in range(100)] #", "before giving up if is_menu_down_fn(): return CalibPos(early_quit=True) img_frame, elapsed_time =", "image of what the moab camera # sees (useful for", "CalibHue(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected, ((x, y), radius) =", "Quit on menu down if buttons.menu_button: return CalibServos(early_quit=True) if ball_detected:", "to be able to stream the calib results env =", "env, stationary_vel=0.005, time_limit=20): start_time = time.time() action = Vector2(0, 0)", "complete def __iter__(self): return iter(astuple(self)) def ball_close_enough(x, y, radius, max_ball_dist=0.045,", "servo_offsets = list(servos - servos_zeroed) return CalibServos(servos=servo_offsets, success=True) # If", "30): \"\"\"Waits for either the joystick or the menu. 
Returns", "are too far from the center and too small return", "def run_servo_calibration(env, pid_fn, calibration_file): # Warning: servo calib works but", "If we found a ball roughly in the center that", "calibrate_controller(**kwargs): run_calibration( kwargs[\"env\"], kwargs[\"pid_fn\"], kwargs[\"calibration_file\"], ) def wait_for_menu_and_stream(): # Get", "np.degrees(avg_hue_rad) % 360 # Convert back to [0, 360] print(f\"Hues", "success=True) # If the plate could be stabilized in time_limit", "the environment to use the new calibration # Warning! This", "command line args parser = argparse.ArgumentParser() parser.add_argument(\"-d\", \"--debug\", action=\"store_true\") parser.add_argument(\"-f\",", "/ 30): while True: menu_button, joy_button, joy_x, joy_y = hardware.get_buttons()", "is_menu_down) if hue_calib.early_quit: hardware.go_up() return # Calibrate position pos_calib =", "\"plate_offsets\": (0.0, 0.0), \"servo_offsets\": (0.0, 0.0, 0.0), } return calibration_dict", "0) # Display message and wait for joystick hardware.display( \"put", "< max_ball_dist and np.abs(y) < max_ball_dist and radius > min_ball_dist", "{avg_hue:0.2f}\") return CalibHue(hue=int(avg_hue), success=True) else: log.warning(f\"Hue calibration failed.\") return CalibHue()", "= \"/tmp/hue\" if hue_calib.success: filename += f\".{hue_calib.hue}.{time_of_day}.jpg\" else: filename +=", "currently give a good calibration raise NotImplementedError # Get some", "ball_close_enough(x, y, radius): log.info( f\"hue={hue:0.3f}, ball_detected={ball_detected}, \" f\"(x, y)={x:0.3f} {y:0.3f},", "360] print(f\"Hues are: {detected_hues}\") print(f\"Hue calibrated: {avg_hue:0.2f}\") print(f\"Avg hue: {avg_hue:0.2f}\")", "environment to use the new calibration # Warning! 
This mutates", "prev_100_y)) # If the average velocity for the last 100", "< stationary_vel) and (prev_100_y < stationary_vel): # Calculate offsets by", "offsets hardware.display( \"Calibarating\\nservos\\n\\n\" \"Place ball in\\ncenter without\\n stand.\\n\\n\" \"Click joystick\\nto", "of the servos. servos = np.array(plate_angles_to_servo_positions(*action)) servos_zeroed = np.array(plate_angles_to_servo_positions(0, 0))", "failed.\") return CalibHue() def calibrate_pos(camera_fn, detector_fn, hue, is_menu_down_fn): for i", "offsets by calculating servo positions at the # current stable", "from env to be able to stream the calib results", "= calibrate_hue(camera_fn, detector_fn, is_menu_down) if hue_calib.early_quit: hardware.go_up() return # Calibrate", "of camera position, and servo offsets \"\"\" import os import", "from dataclasses import dataclass, astuple from hardware import plate_angles_to_servo_positions @dataclass", "{y_offset:.3f}]\") return CalibPos(position=(x_offset, y_offset), success=True) log.warning(f\"Offset calibration failed.\") return CalibPos()", "pos_calib.early_quit: hardware.go_up() return # Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"ball_hue\"]", "\"Hue calib:\\nfailed\\n\\n\" ) pos_str = ( f\"Position \\ncalib:\\nsuccessful\\nPosition = \\n({100*x_offset:.1f},", "ball_detected: vel_x_hist.append(vel_x) vel_y_hist.append(vel_y) prev_100_x = np.mean(np.abs(vel_x_hist[-100:])) prev_100_y = np.mean(np.abs(vel_y_hist[-100:])) print(\"Prev", "if menu or joy: break env.hardware.go_up() return wait_for_menu_and_stream def main(calibration_file,", "# When the calibration is complete, save the image of", "run_servo_calibration(env, pid_fn, calibration_file): # Warning: servo calib works but doesn't", "360 # Convert back to [0, 360] print(f\"Hues are: {detected_hues}\")", "quit hardware.go_up() return hardware.display(\"Calibrating\\nservos...\", scrolling=True) servo_calib = 
calibrate_servo_offsets(pid_fn, env) #", "calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"servo_offsets\"] = servo_calib.servos s1, s2, s3", "servo_calib.success): hardware.display(\"Calibration failed\\nClick menu...\", scrolling=True) else: hue_str = ( f\"Hue", "x_offset = round(x, 3) y_offset = round(y, 3) log.info(f\"Offset calibrated:", "class CalibHue: hue: int = 44 # Reasonable default success:", "np.array(plate_angles_to_servo_positions(*action)) servos_zeroed = np.array(plate_angles_to_servo_positions(0, 0)) servo_offsets = list(servos - servos_zeroed)", "using\\nclear stand.\\n\\n\" \"Click joystick\\nwhen ready.\" scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware)", "Early quit hardware.go_up() return hardware.display(\"Calibrating...\") hue_calib = calibrate_hue(camera_fn, detector_fn, is_menu_down)", "= 44 # Reasonable default success: bool = False early_quit:", "hsv_detector from controllers import pid_controller from dataclasses import dataclass, astuple", "time_of_day = datetime.datetime.now().strftime(\"%H%M%S\") filename = \"/tmp/hue\" if hue_calib.success: filename +=", "\"Calibration\\nfailed\\n\\nClick menu\\nto return...\", scrolling=True ) hardware.go_up() def calibrate_controller(**kwargs): run_calibration( kwargs[\"env\"],", "# the cache. 
TODO: added this while searching for a", "{s2:.2f}, {s3:.2f})\\n\\n\" \"Click menu\\nto return...\\n\", scrolling=True, ) print(f\"servo offsets =\\n({s1:.2f},", "plate up fist hardware.set_angles(0, 0) # Calibrate servo offsets hardware.display(", "{100*y_offset:.1f})cm\\n\\n\" if hue_calib.success else \"(X, Y) calib:\\nfailed\\n\\n\" ) hardware.display( \"Calibration\\npartially", "hardware = env.hardware camera_fn = hardware.camera detector_fn = hardware.detector #", "sum_x, sum_y), ball_detected, buttons = state # Quit on menu", "# Save to streaming menu, joy, _, _ = hardware.get_buttons()", "hue=hue) # If we found a ball roughly in the", "for _ in range(100)] # Run until the ball has", "CalibServos: servos: Tuple[float, float, float] = (0.0, 0.0, 0.0) success:", "This mutates the state! env.reset_calibration(calibration_file=calibration_file) if servo_calib.success: hardware.display( f\"servo offsets", "elapsed_time = camera_fn() hue_options = list(np.linspace(hue_low, hue_high, hue_steps)) detected_hues =", "40 so that the steps are even img_frame, elapsed_time =", "os import cv2 import time import json import argparse import", "Vector2(0, 0) # Initial high vel_history (to use the vel_hist[-100:]", "= [1.0 for _ in range(100)] # Run until the", "0.0), \"servo_offsets\": (0.0, 0.0, 0.0), } return calibration_dict def wait_for_joystick_or_menu(hardware,", "invalidates # the cache. 
TODO: added this while searching for", "detector_fn(img_frame, hue=hue, debug=True) # If we found a ball roughly", "open(calibration_file, \"r\") as f: calibration_dict = json.load(f) else: # Use", "/ 30): \"\"\"Waits for either the joystick or the menu.", "\" f\"(x, y)={x:0.3f} {y:0.3f}, radius={radius:0.3f}\" ) detected_hues.append(hue) if len(detected_hues) >", "return hardware.display(\"Calibrating\\nservos...\", scrolling=True) servo_calib = calibrate_servo_offsets(pid_fn, env) # Save calibration", "the vel_hist[-100:] later) vel_x_hist = [1.0 for _ in range(100)]", "= camera_fn() ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue) #", "complete def __iter__(self): return iter(astuple(self)) @dataclass class CalibPos: position: Tuple[float,", "this while searching for a state bug detector_fn(img_frame, hue=hue_calib.hue +", "env) # Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"servo_offsets\"] = servo_calib.servos", "if is_menu_down_fn(): return CalibPos(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected, ((x,", "state! 
hardware.reset_calibration(calibration_file=calibration_file) if pos_calib.success and hue_calib.success: # and servo_calib.success: hardware.display(f\"Ok!", "hardware.display( \"Calibration\\npartially succeeded\\n\\n\" + hue_str + pos_str + \"Click menu\\nto", "wait_for_joystick_or_menu(hardware, sleep_time=1 / 30): \"\"\"Waits for either the joystick or", "giving up if is_menu_down_fn(): return CalibPos(early_quit=True) img_frame, elapsed_time = camera_fn()", "range(10): # Try and detect for 10 frames before giving", "avg_hue = np.degrees(avg_hue_rad) % 360 # Convert back to [0,", "= hardware.camera detector_fn = hardware.detector def is_menu_down(hardware=hardware) -> bool: return", "works but doesn't currently give a good calibration raise NotImplementedError", "img_frame, elapsed_time = camera_fn() hue_options = list(np.linspace(hue_low, hue_high, hue_steps)) detected_hues", "fist hardware.set_angles(0, 0) # Calibrate servo offsets hardware.display( \"Calibarating\\nservos\\n\\n\" \"Place", "enough if ball_detected and ball_close_enough(x, y, radius): log.info( f\"hue={hue:0.3f}, ball_detected={ball_detected},", "= ( f\"Hue calib:\\nsuccessful\\nBall hue = {hue_calib.hue}\\n\\n\" if hue_calib.success else", "f\"Hue calib:\\nsuccessful\\nBall hue = {hue_calib.hue}\\n\\n\" if hue_calib.success else \"Hue calib:\\nfailed\\n\\n\"", "menu\\nto return...\\n\", scrolling=True, ) # When the calibration is complete,", "# Parse command line args parser = argparse.ArgumentParser() parser.add_argument(\"-d\", \"--debug\",", "# Warning! This mutates the state! 
env.reset_calibration(calibration_file=calibration_file) if servo_calib.success: hardware.display(", "cos_mean) avg_hue = np.degrees(avg_hue_rad) % 360 # Convert back to", "vel_x_hist.append(vel_x) vel_y_hist.append(vel_y) prev_100_x = np.mean(np.abs(vel_x_hist[-100:])) prev_100_y = np.mean(np.abs(vel_y_hist[-100:])) print(\"Prev 100:", "# Use defaults calibration_dict = { \"ball_hue\": 44, \"plate_offsets\": (0.0,", "servos hardware.servo_offsets = (0, 0, 0) # lift plate up", "scrolling=True, ) print(f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\") else: hardware.display( \"Calibration\\nfailed\\n\\nClick", "if buttons.menu_button or buttons.joy_button: return buttons time.sleep(sleep_time) def wait_for_menu(hardware, sleep_time=1", "1) invalidates # the cache. TODO: added this while searching", "hardware.display(\"Calibrating\\nservos...\", scrolling=True) servo_calib = calibrate_servo_offsets(pid_fn, env) # Save calibration calibration_dict", "json import argparse import datetime import numpy as np import", "iter(astuple(self)) def ball_close_enough(x, y, radius, max_ball_dist=0.045, min_ball_dist=0.01): # reject balls", "NotImplementedError # Get some hidden things from env hardware =", "logging as log from env import MoabEnv from typing import", "detector_fn(img_frame, hue=hue) # If we found a ball roughly in", "and whether it succeeded or failed time_of_day = datetime.datetime.now().strftime(\"%H%M%S\") filename", "hardware.camera detector_fn = hardware.detector # Start the calibration with uncalibrated", "= argparse.ArgumentParser() parser.add_argument(\"-d\", \"--debug\", action=\"store_true\") parser.add_argument(\"-f\", \"--file\", default=\"bot.json\", type=str) args,", "import time import json import argparse import datetime import numpy", "kwargs[\"calibration_file\"], ) def wait_for_menu_and_stream(): # Get some hidden things from", "float] = (0.0, 0.0, 0.0) success: bool = False early_quit:", "CalibPos(position=(x_offset, y_offset), 
success=True) log.warning(f\"Offset calibration failed.\") return CalibPos() def calibrate_servo_offsets(pid_fn,", "(0.0, 0.0, 0.0) success: bool = False early_quit: bool =", "def read_calibration(calibration_file=\"bot.json\"): log.info(\"Reading previous calibration.\") if os.path.isfile(calibration_file): with open(calibration_file, \"r\")", "as env: env.step((0, 0)) time.sleep(0.2) env.hardware.enable_servos() time.sleep(0.2) env.hardware.set_servos(133, 133, 133)", "line args parser = argparse.ArgumentParser() parser.add_argument(\"-d\", \"--debug\", action=\"store_true\") parser.add_argument(\"-f\", \"--file\",", "the servos. servos = np.array(plate_angles_to_servo_positions(*action)) servos_zeroed = np.array(plate_angles_to_servo_positions(0, 0)) servo_offsets", "= list(servos - servos_zeroed) return CalibServos(servos=servo_offsets, success=True) # If the", "hardware.go_up() return hardware.display(\"Calibrating...\") hue_calib = calibrate_hue(camera_fn, detector_fn, is_menu_down) if hue_calib.early_quit:", "return CalibHue() def calibrate_pos(camera_fn, detector_fn, hue, is_menu_down_fn): for i in", "time import json import argparse import datetime import numpy as", "wait for joystick hardware.display( \"put ball on stand\\nclick joystick\", #", "menu is pressed before the calibration is complete def __iter__(self):", "# Reasonable default success: bool = False early_quit: bool =", "max_ball_dist=0.045, min_ball_dist=0.01): # reject balls which are too far from", "sending a new hue (hue + 1) invalidates # the", "not menu_button: img_frame, _ = camera_fn() detector_fn(img_frame, debug=True) # Save", "Run until the ball has stabilized or the time limit", "success=True) else: log.warning(f\"Hue calibration failed.\") return CalibHue() def calibrate_pos(camera_fn, detector_fn,", "for either the joystick or the menu. 
Returns the buttons\"\"\"", "hue=hue_calib.hue + 1, debug=True, filename=filename) hardware.go_up() def run_servo_calibration(env, pid_fn, calibration_file):", "found a ball roughly in the center that is large", "pos_str = ( f\"Position \\ncalib:\\nsuccessful\\nPosition = \\n({100*x_offset:.1f}, {100*y_offset:.1f})cm\\n\\n\" if hue_calib.success", "radius): log.info( f\"hue={hue:0.3f}, ball_detected={ball_detected}, \" f\"(x, y)={x:0.3f} {y:0.3f}, radius={radius:0.3f}\" )", "detected_hues.append(hue) if len(detected_hues) > 0: # https://en.wikipedia.org/wiki/Mean_of_circular_quantities detected_hues_rad = np.radians(detected_hues)", "calibration for hue, center of camera position, and servo offsets", "+ pos_str + \"Click menu\\nto return...\\n\", scrolling=True, ) # When", "Returns the buttons\"\"\" while True: buttons = hardware.get_buttons() if buttons.menu_button", "which are too far from the center and too small", "servo_calib.success: hardware.display(f\"Ok! Ball hue={hue_calib.hue}\\nClick menu...\", scrolling=True) elif not (pos_calib.success or", "servos_zeroed = np.array(plate_angles_to_servo_positions(0, 0)) servo_offsets = list(servos - servos_zeroed) return", "if hue_calib.success else \"(X, Y) calib:\\nfailed\\n\\n\" ) hardware.display( \"Calibration\\npartially succeeded\\n\\n\"", "by calculating servo positions at the # current stable position", "min_ball_dist ) def calibrate_hue(camera_fn, detector_fn, is_menu_down_fn): hue_low = 0 hue_high", "((x, y), radius) = detector_fn(img_frame, hue=hue, debug=True) # If we", "in range(10): # Try and detect for 10 frames before", "is complete, save the image of what the moab camera", "@dataclass class CalibHue: hue: int = 44 # Reasonable default", "vel_hist[-100:] later) vel_x_hist = [1.0 for _ in range(100)] vel_y_hist", "joystick\\nto continue.\", scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware) if buttons.menu_button: #", "{s2:.2f}, {s3:.2f})\") else: hardware.display( 
\"Calibration\\nfailed\\n\\nClick menu\\nto return...\", scrolling=True ) hardware.go_up()", "np.abs(x) < max_ball_dist and np.abs(y) < max_ball_dist and radius >", "before the calibration is complete def __iter__(self): return iter(astuple(self)) @dataclass", "= [1.0 for _ in range(100)] vel_y_hist = [1.0 for", "velocity for the last 100 timesteps is under the limit", "uncalibrated servos hardware.servo_offsets = (0, 0, 0) # lift plate", "parser.add_argument(\"-d\", \"--debug\", action=\"store_true\") parser.add_argument(\"-f\", \"--file\", default=\"bot.json\", type=str) args, _ =", "# Convert back to [0, 360] print(f\"Hues are: {detected_hues}\") print(f\"Hue", "reject balls which are too far from the center and", "env.hardware camera_fn = hardware.camera detector_fn = hardware.detector def is_menu_down(hardware=hardware) ->", "joystick hardware.display( \"put ball on stand\\nclick joystick\", # \"Place ball", "as outfile: log.info(f\"Creating calibration file {calibration_file}\") json.dump(calibration_dict, outfile, indent=4, sort_keys=True)", "wait_for_menu_and_stream(): # Get some hidden things from env to be", "and ball_close_enough(x, y, radius): log.info( f\"hue={hue:0.3f}, ball_detected={ball_detected}, \" f\"(x, y)={x:0.3f}", "hue_str = ( f\"Hue calib:\\nsuccessful\\nBall hue = {hue_calib.hue}\\n\\n\" if hue_calib.success", "calibration_file=\"bot.json\"): log.info(\"Writing calibration.\") # write out stuff with open(calibration_file, \"w+\")", "# sees (useful for debugging when the hue calibration fails)", "hardware.go_up() def run_servo_calibration(env, pid_fn, calibration_file): # Warning: servo calib works", "False while not menu_button: img_frame, _ = camera_fn() detector_fn(img_frame, debug=True)", "hue, center of camera position, and servo offsets \"\"\" import", "write out stuff with open(calibration_file, \"w+\") as outfile: log.info(f\"Creating calibration", "read_calibration(calibration_file) calibration_dict[\"ball_hue\"] = hue_calib.hue 
calibration_dict[\"plate_offsets\"] = pos_calib.position x_offset, y_offset =", "hue_high, hue_steps)) detected_hues = [] for hue in hue_options: if", "while True: menu_button, joy_button, joy_x, joy_y = hardware.get_buttons() time.sleep(sleep_time) if", "# or servo_calib.success): hardware.display(\"Calibration failed\\nClick menu...\", scrolling=True) else: hue_str =", "calib:\\nfailed\\n\\n\" ) pos_str = ( f\"Position \\ncalib:\\nsuccessful\\nPosition = \\n({100*x_offset:.1f}, {100*y_offset:.1f})cm\\n\\n\"", "ball_close_enough(x, y, radius, max_ball_dist=0.045, min_ball_dist=0.01): # reject balls which are", "not (pos_calib.success or hue_calib.success): # or servo_calib.success): hardware.display(\"Calibration failed\\nClick menu...\",", "hue_options: if is_menu_down_fn(): return CalibHue(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected,", "np.mean(sines), np.mean(cosines) avg_hue_rad = np.arctan2(sin_mean, cos_mean) avg_hue = np.degrees(avg_hue_rad) %", "pid_fn, calibration_file): # Warning: servo calib works but doesn't currently", "as np import logging as log from env import MoabEnv", "def __iter__(self): return iter(astuple(self)) @dataclass class CalibPos: position: Tuple[float, float]", "= hardware.get_buttons() time.sleep(sleep_time) if menu_button: return def run_calibration(env, pid_fn, calibration_file):", "+= f\".fail.{time_of_day}.jpg\" img_frame, _ = camera_fn() # Huemask keeps an", "debug=True, filename=filename) hardware.go_up() def run_servo_calibration(env, pid_fn, calibration_file): # Warning: servo", "_ = camera_fn() detector_fn(img_frame, debug=True) # Save to streaming menu,", "joy_button, joy_x, joy_y = hardware.get_buttons() time.sleep(sleep_time) if menu_button: return def", "import os import cv2 import time import json import argparse", "and detect for 10 frames before giving up if is_menu_down_fn():", "json.dump(calibration_dict, outfile, indent=4, sort_keys=True) def read_calibration(calibration_file=\"bot.json\"): 
log.info(\"Reading previous calibration.\") if", "succeeded\\n\\n\" + hue_str + pos_str + \"Click menu\\nto return...\\n\", scrolling=True,", "added this while searching for a state bug detector_fn(img_frame, hue=hue_calib.hue", "run_calibration(env, pid_fn, calibration_file): # Get some hidden things from env", "without\\n stand.\\n\\n\" \"Click joystick\\nto continue.\", scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware)", "# Initial high vel_history (to use the vel_hist[-100:] later) vel_x_hist", "camera_fn() # Huemask keeps an internal cache. By sending a", "y_offset), success=True) log.warning(f\"Offset calibration failed.\") return CalibPos() def calibrate_servo_offsets(pid_fn, env,", "# Get some hidden things from env to be able", "scrolling=True ) hardware.go_up() def calibrate_controller(**kwargs): run_calibration( kwargs[\"env\"], kwargs[\"pid_fn\"], kwargs[\"calibration_file\"], )", "enough if ball_detected and ball_close_enough(x, y, radius): x_offset = round(x,", "# If we found a ball roughly in the center", "time.sleep(sleep_time) if menu_button: return def run_calibration(env, pid_fn, calibration_file): # Get", "stable position and subtracting the `default` zeroed # position of", "\"put ball on stand\\nclick joystick\", # \"Place ball in\\ncenter using\\nclear", "0, 0) # lift plate up fist hardware.set_angles(0, 0) #", "100 timesteps is under the limit if (prev_100_x < stationary_vel)", "menu_button = False while not menu_button: img_frame, _ = camera_fn()", "(useful for debugging when the hue calibration fails) # Have", "if os.path.isfile(calibration_file): with open(calibration_file, \"r\") as f: calibration_dict = json.load(f)", "def wait_for_menu_and_stream(): # Get some hidden things from env to", "is large enough if ball_detected and ball_close_enough(x, y, radius): log.info(", "the buttons\"\"\" while True: buttons = hardware.get_buttons() if buttons.menu_button or", "servo offsets \"\"\" import os import cv2 import time import", 
"zeroed # position of the servos. servos = np.array(plate_angles_to_servo_positions(*action)) servos_zeroed", "+ hue_str + pos_str + \"Click menu\\nto return...\\n\", scrolling=True, )", "if ball_detected: vel_x_hist.append(vel_x) vel_y_hist.append(vel_y) prev_100_x = np.mean(np.abs(vel_x_hist[-100:])) prev_100_y = np.mean(np.abs(vel_y_hist[-100:]))", "# Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"servo_offsets\"] = servo_calib.servos s1,", "= np.degrees(avg_hue_rad) % 360 # Convert back to [0, 360]", "(prev_100_x, prev_100_y)) # If the average velocity for the last", "= pos_calib.position write_calibration(calibration_dict) # Update the environment to use the", "menu...\", scrolling=True) else: hue_str = ( f\"Hue calib:\\nsuccessful\\nBall hue =", "new hue (hue + 1) invalidates # the cache. TODO:", "cos_mean = np.mean(sines), np.mean(cosines) avg_hue_rad = np.arctan2(sin_mean, cos_mean) avg_hue =", "vel_x, vel_y, sum_x, sum_y), ball_detected, buttons = state # Quit", "is large enough if ball_detected and ball_close_enough(x, y, radius): x_offset", "buttons = state # Quit on menu down if buttons.menu_button:", "\"Click joystick\\nwhen ready.\" scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware) if buttons.menu_button:", "os.path.isfile(calibration_file): with open(calibration_file, \"r\") as f: calibration_dict = json.load(f) else:", "import hsv_detector from controllers import pid_controller from dataclasses import dataclass,", "from detector import hsv_detector from controllers import pid_controller from dataclasses", "# https://en.wikipedia.org/wiki/Mean_of_circular_quantities detected_hues_rad = np.radians(detected_hues) sines, cosines = np.sin(detected_hues_rad), np.cos(detected_hues_rad)", "pid_fn(state) (x, y, vel_x, vel_y, sum_x, sum_y), ball_detected, buttons =", "from env hardware = env.hardware camera_fn = hardware.camera detector_fn =", "f\".{hue_calib.hue}.{time_of_day}.jpg\" else: filename += 
f\".fail.{time_of_day}.jpg\" img_frame, _ = camera_fn() #", "min_ball_dist=0.01): # reject balls which are too far from the", "down if buttons.menu_button: return CalibServos(early_quit=True) if ball_detected: vel_x_hist.append(vel_x) vel_y_hist.append(vel_y) prev_100_x", "camera_fn() hue_options = list(np.linspace(hue_low, hue_high, hue_steps)) detected_hues = [] for", "float, float] = (0.0, 0.0, 0.0) success: bool = False", "hue, is_menu_down_fn): for i in range(10): # Try and detect", "the joystick or the menu. Returns the buttons\"\"\" while True:", "seconds, quit log.warning(f\"Servo calibration failed.\") return CalibServos() def write_calibration(calibration_dict, calibration_file=\"bot.json\"):", "import MoabEnv from typing import Tuple from common import Vector2", "or hue_calib.success): # or servo_calib.success): hardware.display(\"Calibration failed\\nClick menu...\", scrolling=True) else:", "scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware) if buttons.menu_button: # Early quit", "(pos_calib.success or hue_calib.success): # or servo_calib.success): hardware.display(\"Calibration failed\\nClick menu...\", scrolling=True)", "far from the center and too small return ( np.abs(x)", "# Copyright (c) Microsoft Corporation. # Licensed under the MIT", "results env = kwargs[\"env\"] hardware = env.hardware camera_fn = hardware.camera", "hardware.display(f\"Ok! 
Ball hue={hue_calib.hue}\\nClick menu...\", scrolling=True) elif not (pos_calib.success or hue_calib.success):", "hardware.camera detector_fn = hardware.detector menu_button = False while not menu_button:", "Calculate offsets by calculating servo positions at the # current", "if servo_calib.success: hardware.display( f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\\n\\n\" \"Click menu\\nto", "= hardware.detector menu_button = False while not menu_button: img_frame, _", "camera position, and servo offsets \"\"\" import os import cv2", "\"servo_offsets\": (0.0, 0.0, 0.0), } return calibration_dict def wait_for_joystick_or_menu(hardware, sleep_time=1", "return CalibServos() def write_calibration(calibration_dict, calibration_file=\"bot.json\"): log.info(\"Writing calibration.\") # write out", "main(calibration_file, frequency=30, debug=True): pid_fn = pid_controller(frequency=frequency) with MoabEnv(frequency=frequency, debug=debug) as", "CalibPos() def calibrate_servo_offsets(pid_fn, env, stationary_vel=0.005, time_limit=20): start_time = time.time() action", "could be stabilized in time_limit seconds, quit log.warning(f\"Servo calibration failed.\")", "use the vel_hist[-100:] later) vel_x_hist = [1.0 for _ in", "CalibHue: hue: int = 44 # Reasonable default success: bool", "return # Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"ball_hue\"] = hue_calib.hue", "env.reset_calibration(calibration_file=calibration_file) if servo_calib.success: hardware.display( f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\\n\\n\" \"Click", "bool = False early_quit: bool = False # If menu", "hue_low = 0 hue_high = 360 hue_steps = 41 #", "# lift plate up fist hardware.set_angles(0, 0) # Calibrate servo", "calibrate_hue(camera_fn, detector_fn, is_menu_down_fn): hue_low = 0 hue_high = 360 hue_steps", "= time.time() action = Vector2(0, 0) # Initial high vel_history", "-> bool: return hardware.get_buttons().menu_button # lift plate up first 
hardware.set_angles(0,", "calibration with uncalibrated servos hardware.servo_offsets = (0, 0, 0) #", "a nice filename with the time and whether it succeeded", "filename += f\".{hue_calib.hue}.{time_of_day}.jpg\" else: filename += f\".fail.{time_of_day}.jpg\" img_frame, _ =", "= hardware.detector # Start the calibration with uncalibrated servos hardware.servo_offsets", "calibration_file): # Warning: servo calib works but doesn't currently give", "\"__main__\": # Parse command line args parser = argparse.ArgumentParser() parser.add_argument(\"-d\",", "radius, max_ball_dist=0.045, min_ball_dist=0.01): # reject balls which are too far", "Get some hidden things from env hardware = env.hardware camera_fn", "# Run until the ball has stabilized or the time", "( f\"Position \\ncalib:\\nsuccessful\\nPosition = \\n({100*x_offset:.1f}, {100*y_offset:.1f})cm\\n\\n\" if hue_calib.success else \"(X,", "buttons.menu_button: # Early quit hardware.go_up() return hardware.display(\"Calibrating\\nservos...\", scrolling=True) servo_calib =", "was reached while time.time() < start_time + time_limit: state =", "hue_high = 360 hue_steps = 41 # Is 41 instead", "elif not (pos_calib.success or hue_calib.success): # or servo_calib.success): hardware.display(\"Calibration failed\\nClick", "\"(X, Y) calib:\\nfailed\\n\\n\" ) hardware.display( \"Calibration\\npartially succeeded\\n\\n\" + hue_str +", "the # current stable position and subtracting the `default` zeroed", "np.array(plate_angles_to_servo_positions(0, 0)) servo_offsets = list(servos - servos_zeroed) return CalibServos(servos=servo_offsets, success=True)", "center that is large enough if ball_detected and ball_close_enough(x, y,", "complete def __iter__(self): return iter(astuple(self)) @dataclass class CalibServos: servos: Tuple[float,", "stationary_vel): # Calculate offsets by calculating servo positions at the", ") detected_hues.append(hue) if len(detected_hues) > 0: # https://en.wikipedia.org/wiki/Mean_of_circular_quantities 
detected_hues_rad =", "round(x, 3) y_offset = round(y, 3) log.info(f\"Offset calibrated: [{x_offset:.3f}, {y_offset:.3f}]\")", "camera_fn() ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue) # If", "stabilized or the time limit was reached while time.time() <", "buttons = wait_for_joystick_or_menu(hardware) if buttons.menu_button: # Early quit hardware.go_up() return", "joy_x, joy_y = hardware.get_buttons() time.sleep(sleep_time) if menu_button: return def run_calibration(env,", "plate could be stabilized in time_limit seconds, quit log.warning(f\"Servo calibration", "frames before giving up if is_menu_down_fn(): return CalibPos(early_quit=True) img_frame, elapsed_time", "hardware = env.hardware camera_fn = hardware.camera detector_fn = hardware.detector def", "info = pid_fn(state) (x, y, vel_x, vel_y, sum_x, sum_y), ball_detected,", "camera_fn() ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue, debug=True) #", "y), radius) = detector_fn(img_frame, hue=hue, debug=True) # If we found", "\"--debug\", action=\"store_true\") parser.add_argument(\"-f\", \"--file\", default=\"bot.json\", type=str) args, _ = parser.parse_known_args()", "and servo offsets \"\"\" import os import cv2 import time", "state # Quit on menu down if buttons.menu_button: return CalibServos(early_quit=True)", "before the calibration is complete def __iter__(self): return iter(astuple(self)) def", "is_menu_down_fn(): return CalibPos(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected, ((x, y),", "Tuple[float, float] = (0.0, 0.0) success: bool = False early_quit:", "max_ball_dist and radius > min_ball_dist ) def calibrate_hue(camera_fn, detector_fn, is_menu_down_fn):", "np.sin(detected_hues_rad), np.cos(detected_hues_rad) sin_mean, cos_mean = np.mean(sines), np.mean(cosines) avg_hue_rad = np.arctan2(sin_mean,", "calibrate_hue(camera_fn, detector_fn, is_menu_down) if hue_calib.early_quit: hardware.go_up() return # Calibrate position", "hardware.go_up() 
def calibrate_controller(**kwargs): run_calibration( kwargs[\"env\"], kwargs[\"pid_fn\"], kwargs[\"calibration_file\"], ) def wait_for_menu_and_stream():", "\"Click joystick\\nto continue.\", scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware) if buttons.menu_button:", "write_calibration(calibration_dict, calibration_file=\"bot.json\"): log.info(\"Writing calibration.\") # write out stuff with open(calibration_file,", "img_frame, elapsed_time = camera_fn() ball_detected, ((x, y), radius) = detector_fn(img_frame,", "(prev_100_x < stationary_vel) and (prev_100_y < stationary_vel): # Calculate offsets", ") # When the calibration is complete, save the image", "= [] for hue in hue_options: if is_menu_down_fn(): return CalibHue(early_quit=True)", "def ball_close_enough(x, y, radius, max_ball_dist=0.045, min_ball_dist=0.01): # reject balls which", "buttons.joy_button: return buttons time.sleep(sleep_time) def wait_for_menu(hardware, sleep_time=1 / 30): while", "log.info( f\"hue={hue:0.3f}, ball_detected={ball_detected}, \" f\"(x, y)={x:0.3f} {y:0.3f}, radius={radius:0.3f}\" ) detected_hues.append(hue)", "menu_button, joy_button, joy_x, joy_y = hardware.get_buttons() time.sleep(sleep_time) if menu_button: return", "able to stream the calib results env = kwargs[\"env\"] hardware", "argparse.ArgumentParser() parser.add_argument(\"-d\", \"--debug\", action=\"store_true\") parser.add_argument(\"-f\", \"--file\", default=\"bot.json\", type=str) args, _", "out stuff with open(calibration_file, \"w+\") as outfile: log.info(f\"Creating calibration file", "even img_frame, elapsed_time = camera_fn() hue_options = list(np.linspace(hue_low, hue_high, hue_steps))", "def calibrate_servo_offsets(pid_fn, env, stationary_vel=0.005, time_limit=20): start_time = time.time() action =", "y), radius) = detector_fn(img_frame, hue=hue) # If we found a", "CalibServos() def write_calibration(calibration_dict, calibration_file=\"bot.json\"): log.info(\"Writing calibration.\") # write out 
stuff", ") hardware.display( \"Calibration\\npartially succeeded\\n\\n\" + hue_str + pos_str + \"Click", "argparse import datetime import numpy as np import logging as", "return CalibHue(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected, ((x, y), radius)", "with open(calibration_file, \"r\") as f: calibration_dict = json.load(f) else: #", "plate up first hardware.set_angles(0, 0) # Display message and wait", "moab camera # sees (useful for debugging when the hue", "detector_fn, is_menu_down_fn): hue_low = 0 hue_high = 360 hue_steps =", "# Is 41 instead of 40 so that the steps", "import Tuple from common import Vector2 from detector import hsv_detector", "servos: Tuple[float, float, float] = (0.0, 0.0, 0.0) success: bool", "camera_fn = hardware.camera detector_fn = hardware.detector # Start the calibration", "things from env hardware = env.hardware camera_fn = hardware.camera detector_fn", "ball in\\ncenter without\\n stand.\\n\\n\" \"Click joystick\\nto continue.\", scrolling=True, ) buttons", "= detector_fn(img_frame, hue=hue, debug=True) # If we found a ball", "= np.mean(np.abs(vel_y_hist[-100:])) print(\"Prev 100: \", (prev_100_x, prev_100_y)) # If the", "if menu_button: return def run_calibration(env, pid_fn, calibration_file): # Get some", "pos_str + \"Click menu\\nto return...\\n\", scrolling=True, ) # When the", "debug=debug) as env: env.step((0, 0)) time.sleep(0.2) env.hardware.enable_servos() time.sleep(0.2) env.hardware.set_servos(133, 133,", "Tuple[float, float, float] = (0.0, 0.0, 0.0) success: bool =", "# write out stuff with open(calibration_file, \"w+\") as outfile: log.info(f\"Creating", "calculating servo positions at the # current stable position and", "the steps are even img_frame, elapsed_time = camera_fn() hue_options =", "raise NotImplementedError # Get some hidden things from env hardware", "= env.hardware camera_fn = hardware.camera detector_fn = hardware.detector # Start", "= hardware.camera detector_fn = 
hardware.detector # Start the calibration with", "time.sleep(0.2) env.hardware.set_servos(133, 133, 133) run_calibration(env, pid_fn, calibration_file) env.hardware.disable_servos() if __name__", "hardware.set_angles(0, 0) # Display message and wait for joystick hardware.display(", "wait_for_menu(hardware, sleep_time=1 / 30): while True: menu_button, joy_button, joy_x, joy_y", "def wait_for_joystick_or_menu(hardware, sleep_time=1 / 30): \"\"\"Waits for either the joystick", "servo_calib = calibrate_servo_offsets(pid_fn, env) # Save calibration calibration_dict = read_calibration(calibration_file)", "from hardware import plate_angles_to_servo_positions @dataclass class CalibHue: hue: int =", "Performs calibration for hue, center of camera position, and servo", "offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\\n\\n\" \"Click menu\\nto return...\\n\", scrolling=True, ) print(f\"servo", "bool = False # If menu is pressed before the", "the ball has stabilized or the time limit was reached", "bug detector_fn(img_frame, hue=hue_calib.hue + 1, debug=True, filename=filename) hardware.go_up() def run_servo_calibration(env,", "radius) = detector_fn(img_frame, hue=hue) # If we found a ball", "the time limit was reached while time.time() < start_time +", "while True: buttons = hardware.get_buttons() if buttons.menu_button or buttons.joy_button: return", "if hue_calib.success: filename += f\".{hue_calib.hue}.{time_of_day}.jpg\" else: filename += f\".fail.{time_of_day}.jpg\" img_frame,", "(0, 0, 0) # lift plate up fist hardware.set_angles(0, 0)", "in\\ncenter using\\nclear stand.\\n\\n\" \"Click joystick\\nwhen ready.\" scrolling=True, ) buttons =", "calibration_file) env.hardware.disable_servos() if __name__ == \"__main__\": # Parse command line", "hardware = env.hardware camera_fn = hardware.camera detector_fn = hardware.detector menu_button", "or servo_calib.success): hardware.display(\"Calibration failed\\nClick menu...\", scrolling=True) else: hue_str = (", "hue_steps)) 
detected_hues = [] for hue in hue_options: if is_menu_down_fn():", "pid_controller(frequency=frequency) with MoabEnv(frequency=frequency, debug=debug) as env: env.step((0, 0)) time.sleep(0.2) env.hardware.enable_servos()", "servo positions at the # current stable position and subtracting", "= 360 hue_steps = 41 # Is 41 instead of", "good calibration raise NotImplementedError # Get some hidden things from", "sin_mean, cos_mean = np.mean(sines), np.mean(cosines) avg_hue_rad = np.arctan2(sin_mean, cos_mean) avg_hue", "# lift plate up first hardware.set_angles(0, 0) # Display message", "> min_ball_dist ) def calibrate_hue(camera_fn, detector_fn, is_menu_down_fn): hue_low = 0", "the limit if (prev_100_x < stationary_vel) and (prev_100_y < stationary_vel):", "debug=True) # If we found a ball roughly in the", "return CalibServos(early_quit=True) if ball_detected: vel_x_hist.append(vel_x) vel_y_hist.append(vel_y) prev_100_x = np.mean(np.abs(vel_x_hist[-100:])) prev_100_y", "pid_fn, calibration_file): # Get some hidden things from env hardware", "for hue in hue_options: if is_menu_down_fn(): return CalibHue(early_quit=True) img_frame, elapsed_time", "print(f\"Avg hue: {avg_hue:0.2f}\") return CalibHue(hue=int(avg_hue), success=True) else: log.warning(f\"Hue calibration failed.\")", "prev_100_y = np.mean(np.abs(vel_y_hist[-100:])) print(\"Prev 100: \", (prev_100_x, prev_100_y)) # If", "# \"Place ball in\\ncenter using\\nclear stand.\\n\\n\" \"Click joystick\\nwhen ready.\" scrolling=True,", "hue = {hue_calib.hue}\\n\\n\" if hue_calib.success else \"Hue calib:\\nfailed\\n\\n\" ) pos_str", "30): while True: menu_button, joy_button, joy_x, joy_y = hardware.get_buttons() time.sleep(sleep_time)", "radius): x_offset = round(x, 3) y_offset = round(y, 3) log.info(f\"Offset", "is_menu_down_fn): hue_low = 0 hue_high = 360 hue_steps = 41", "(c) Microsoft Corporation. # Licensed under the MIT License. \"\"\"", "instead of 40 so that the steps are even img_frame,", "the MIT License. 
\"\"\" Calibration Controller Performs calibration for hue,", "calibration_dict[\"plate_offsets\"] = pos_calib.position x_offset, y_offset = pos_calib.position write_calibration(calibration_dict) # Update", "hue_calib.success: # and servo_calib.success: hardware.display(f\"Ok! Ball hue={hue_calib.hue}\\nClick menu...\", scrolling=True) elif", "\"\"\" import os import cv2 import time import json import", "center of camera position, and servo offsets \"\"\" import os", "# Try and detect for 10 frames before giving up", "return iter(astuple(self)) @dataclass class CalibServos: servos: Tuple[float, float, float] =", "debug=True): pid_fn = pid_controller(frequency=frequency) with MoabEnv(frequency=frequency, debug=debug) as env: env.step((0,", "are even img_frame, elapsed_time = camera_fn() hue_options = list(np.linspace(hue_low, hue_high,", "hue: int = 44 # Reasonable default success: bool =", "back to [0, 360] print(f\"Hues are: {detected_hues}\") print(f\"Hue calibrated: {avg_hue:0.2f}\")", "detected_hues_rad = np.radians(detected_hues) sines, cosines = np.sin(detected_hues_rad), np.cos(detected_hues_rad) sin_mean, cos_mean", "# Have a nice filename with the time and whether", "internal cache. 
By sending a new hue (hue + 1)", "state = env.step(action) action, info = pid_fn(state) (x, y, vel_x,", "camera_fn = hardware.camera detector_fn = hardware.detector def is_menu_down(hardware=hardware) -> bool:", "= hardware.detector def is_menu_down(hardware=hardware) -> bool: return hardware.get_buttons().menu_button # lift", "to stream the calib results env = kwargs[\"env\"] hardware =", "hue_calib.hue calibration_dict[\"plate_offsets\"] = pos_calib.position x_offset, y_offset = pos_calib.position write_calibration(calibration_dict) #", "iter(astuple(self)) @dataclass class CalibServos: servos: Tuple[float, float, float] = (0.0,", "return CalibPos(position=(x_offset, y_offset), success=True) log.warning(f\"Offset calibration failed.\") return CalibPos() def", "x_offset, y_offset = pos_calib.position write_calibration(calibration_dict) # Update the environment to", "read_calibration(calibration_file) calibration_dict[\"servo_offsets\"] = servo_calib.servos s1, s2, s3 = servo_calib.servos write_calibration(calibration_dict)", "outfile, indent=4, sort_keys=True) def read_calibration(calibration_file=\"bot.json\"): log.info(\"Reading previous calibration.\") if os.path.isfile(calibration_file):", "calibration_dict[\"ball_hue\"] = hue_calib.hue calibration_dict[\"plate_offsets\"] = pos_calib.position x_offset, y_offset = pos_calib.position", "import plate_angles_to_servo_positions @dataclass class CalibHue: hue: int = 44 #", "and ball_close_enough(x, y, radius): x_offset = round(x, 3) y_offset =", "= pid_controller(frequency=frequency) with MoabEnv(frequency=frequency, debug=debug) as env: env.step((0, 0)) time.sleep(0.2)", "= ( f\"Position \\ncalib:\\nsuccessful\\nPosition = \\n({100*x_offset:.1f}, {100*y_offset:.1f})cm\\n\\n\" if hue_calib.success else", "import numpy as np import logging as log from env", "True: menu_button, joy_button, joy_x, joy_y = hardware.get_buttons() time.sleep(sleep_time) if menu_button:", "mutates the state! 
env.reset_calibration(calibration_file=calibration_file) if servo_calib.success: hardware.display( f\"servo offsets =\\n({s1:.2f},", "(to use the vel_hist[-100:] later) vel_x_hist = [1.0 for _", "= calibrate_servo_offsets(pid_fn, env) # Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"servo_offsets\"]", "from env import MoabEnv from typing import Tuple from common", "Warning: servo calib works but doesn't currently give a good", "calib:\\nfailed\\n\\n\" ) hardware.display( \"Calibration\\npartially succeeded\\n\\n\" + hue_str + pos_str +", "some hidden things from env to be able to stream", "_ = camera_fn() # Huemask keeps an internal cache. By", "pos_calib.position write_calibration(calibration_dict) # Update the environment to use the new", "whether it succeeded or failed time_of_day = datetime.datetime.now().strftime(\"%H%M%S\") filename =", "= hardware.get_buttons() if buttons.menu_button or buttons.joy_button: return buttons time.sleep(sleep_time) def", "be able to stream the calib results env = kwargs[\"env\"]", "with MoabEnv(frequency=frequency, debug=debug) as env: env.step((0, 0)) time.sleep(0.2) env.hardware.enable_servos() time.sleep(0.2)", "servos = np.array(plate_angles_to_servo_positions(*action)) servos_zeroed = np.array(plate_angles_to_servo_positions(0, 0)) servo_offsets = list(servos", "calibration_dict = read_calibration(calibration_file) calibration_dict[\"ball_hue\"] = hue_calib.hue calibration_dict[\"plate_offsets\"] = pos_calib.position x_offset,", "failed\\nClick menu...\", scrolling=True) else: hue_str = ( f\"Hue calib:\\nsuccessful\\nBall hue", "on menu down if buttons.menu_button: return CalibServos(early_quit=True) if ball_detected: vel_x_hist.append(vel_x)", "under the limit if (prev_100_x < stationary_vel) and (prev_100_y <", "log.info(\"Reading previous calibration.\") if os.path.isfile(calibration_file): with open(calibration_file, \"r\") as f:", "0) # lift plate up fist hardware.set_angles(0, 
0) # Calibrate", "round(y, 3) log.info(f\"Offset calibrated: [{x_offset:.3f}, {y_offset:.3f}]\") return CalibPos(position=(x_offset, y_offset), success=True)", "= camera_fn() hue_options = list(np.linspace(hue_low, hue_high, hue_steps)) detected_hues = []", "range(100)] # Run until the ball has stabilized or the", "\"Calibration\\npartially succeeded\\n\\n\" + hue_str + pos_str + \"Click menu\\nto return...\\n\",", "def wait_for_menu(hardware, sleep_time=1 / 30): while True: menu_button, joy_button, joy_x,", "is complete def __iter__(self): return iter(astuple(self)) def ball_close_enough(x, y, radius,", "fails) # Have a nice filename with the time and", "log.warning(f\"Servo calibration failed.\") return CalibServos() def write_calibration(calibration_dict, calibration_file=\"bot.json\"): log.info(\"Writing calibration.\")", ") buttons = wait_for_joystick_or_menu(hardware) if buttons.menu_button: # Early quit hardware.go_up()", "= hardware.camera detector_fn = hardware.detector menu_button = False while not", "sleep_time=1 / 30): \"\"\"Waits for either the joystick or the", "# Warning: servo calib works but doesn't currently give a", ") hardware.go_up() def calibrate_controller(**kwargs): run_calibration( kwargs[\"env\"], kwargs[\"pid_fn\"], kwargs[\"calibration_file\"], ) def", "Microsoft Corporation. # Licensed under the MIT License. 
\"\"\" Calibration", "0.0) success: bool = False early_quit: bool = False #", "= hue_calib.hue calibration_dict[\"plate_offsets\"] = pos_calib.position x_offset, y_offset = pos_calib.position write_calibration(calibration_dict)", "hue_calib.success else \"(X, Y) calib:\\nfailed\\n\\n\" ) hardware.display( \"Calibration\\npartially succeeded\\n\\n\" +", "hidden things from env to be able to stream the", "= False while not menu_button: img_frame, _ = camera_fn() detector_fn(img_frame,", "buttons time.sleep(sleep_time) def wait_for_menu(hardware, sleep_time=1 / 30): while True: menu_button,", "= hardware.get_buttons() if menu or joy: break env.hardware.go_up() return wait_for_menu_and_stream", "Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"servo_offsets\"] = servo_calib.servos s1, s2,", "menu\\nto return...\", scrolling=True ) hardware.go_up() def calibrate_controller(**kwargs): run_calibration( kwargs[\"env\"], kwargs[\"pid_fn\"],", "Try and detect for 10 frames before giving up if", "# Early quit hardware.go_up() return hardware.display(\"Calibrating\\nservos...\", scrolling=True) servo_calib = calibrate_servo_offsets(pid_fn,", "0.0), } return calibration_dict def wait_for_joystick_or_menu(hardware, sleep_time=1 / 30): \"\"\"Waits", "hue in hue_options: if is_menu_down_fn(): return CalibHue(early_quit=True) img_frame, elapsed_time =", "if is_menu_down_fn(): return CalibHue(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected, ((x,", "360 hue_steps = 41 # Is 41 instead of 40", "class CalibServos: servos: Tuple[float, float, float] = (0.0, 0.0, 0.0)", "# position of the servos. 
servos = np.array(plate_angles_to_servo_positions(*action)) servos_zeroed =", "{y:0.3f}, radius={radius:0.3f}\" ) detected_hues.append(hue) if len(detected_hues) > 0: # https://en.wikipedia.org/wiki/Mean_of_circular_quantities", "are: {detected_hues}\") print(f\"Hue calibrated: {avg_hue:0.2f}\") print(f\"Avg hue: {avg_hue:0.2f}\") return CalibHue(hue=int(avg_hue),", "debug=True) # Save to streaming menu, joy, _, _ =", "Update the environment to use the new calibration # Warning!", "__iter__(self): return iter(astuple(self)) @dataclass class CalibPos: position: Tuple[float, float] =", "for hue, center of camera position, and servo offsets \"\"\"", "= list(np.linspace(hue_low, hue_high, hue_steps)) detected_hues = [] for hue in", "CalibHue(hue=int(avg_hue), success=True) else: log.warning(f\"Hue calibration failed.\") return CalibHue() def calibrate_pos(camera_fn,", "bool: return hardware.get_buttons().menu_button # lift plate up first hardware.set_angles(0, 0)", "hardware.get_buttons().menu_button # lift plate up first hardware.set_angles(0, 0) # Display", "calibrate_pos(camera_fn, detector_fn, hue_calib.hue, is_menu_down) if pos_calib.early_quit: hardware.go_up() return # Save", "iter(astuple(self)) @dataclass class CalibPos: position: Tuple[float, float] = (0.0, 0.0)", "# current stable position and subtracting the `default` zeroed #", "f\"(x, y)={x:0.3f} {y:0.3f}, radius={radius:0.3f}\" ) detected_hues.append(hue) if len(detected_hues) > 0:", "calibration fails) # Have a nice filename with the time", "a good calibration raise NotImplementedError # Get some hidden things", "kwargs[\"env\"], kwargs[\"pid_fn\"], kwargs[\"calibration_file\"], ) def wait_for_menu_and_stream(): # Get some hidden", "hidden things from env hardware = env.hardware camera_fn = hardware.camera", "log.warning(f\"Hue calibration failed.\") return CalibHue() def calibrate_pos(camera_fn, detector_fn, hue, is_menu_down_fn):", "= wait_for_joystick_or_menu(hardware) if buttons.menu_button: # Early 
quit hardware.go_up() return hardware.display(\"Calibrating\\nservos...\",", "s2, s3 = servo_calib.servos write_calibration(calibration_dict) # Update the environment to", "the center and too small return ( np.abs(x) < max_ball_dist", "Initial high vel_history (to use the vel_hist[-100:] later) vel_x_hist =", "in time_limit seconds, quit log.warning(f\"Servo calibration failed.\") return CalibServos() def", "on stand\\nclick joystick\", # \"Place ball in\\ncenter using\\nclear stand.\\n\\n\" \"Click", "limit was reached while time.time() < start_time + time_limit: state", "and radius > min_ball_dist ) def calibrate_hue(camera_fn, detector_fn, is_menu_down_fn): hue_low", "if len(detected_hues) > 0: # https://en.wikipedia.org/wiki/Mean_of_circular_quantities detected_hues_rad = np.radians(detected_hues) sines,", "calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"ball_hue\"] = hue_calib.hue calibration_dict[\"plate_offsets\"] = pos_calib.position", "large enough if ball_detected and ball_close_enough(x, y, radius): x_offset =", "defaults calibration_dict = { \"ball_hue\": 44, \"plate_offsets\": (0.0, 0.0), \"servo_offsets\":", "that the steps are even img_frame, elapsed_time = camera_fn() hue_options", "read_calibration(calibration_file=\"bot.json\"): log.info(\"Reading previous calibration.\") if os.path.isfile(calibration_file): with open(calibration_file, \"r\") as", "(0.0, 0.0), \"servo_offsets\": (0.0, 0.0, 0.0), } return calibration_dict def", "# Early quit hardware.go_up() return hardware.display(\"Calibrating...\") hue_calib = calibrate_hue(camera_fn, detector_fn,", "# and servo_calib.success: hardware.display(f\"Ok! 
Ball hue={hue_calib.hue}\\nClick menu...\", scrolling=True) elif not", "If menu is pressed before the calibration is complete def", "[] for hue in hue_options: if is_menu_down_fn(): return CalibHue(early_quit=True) img_frame,", "= json.load(f) else: # Use defaults calibration_dict = { \"ball_hue\":", "that is large enough if ball_detected and ball_close_enough(x, y, radius):", "hue calibration fails) # Have a nice filename with the", "filename with the time and whether it succeeded or failed", "json.load(f) else: # Use defaults calibration_dict = { \"ball_hue\": 44,", "= calibrate_pos(camera_fn, detector_fn, hue_calib.hue, is_menu_down) if pos_calib.early_quit: hardware.go_up() return #", "is_menu_down) if pos_calib.early_quit: hardware.go_up() return # Save calibration calibration_dict =", "scrolling=True) servo_calib = calibrate_servo_offsets(pid_fn, env) # Save calibration calibration_dict =", "a ball roughly in the center that is large enough", "hue: {avg_hue:0.2f}\") return CalibHue(hue=int(avg_hue), success=True) else: log.warning(f\"Hue calibration failed.\") return", "= Vector2(0, 0) # Initial high vel_history (to use the", "until the ball has stabilized or the time limit was", "calibrate_servo_offsets(pid_fn, env) # Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"servo_offsets\"] =", "hardware.servo_offsets = (0, 0, 0) # lift plate up fist", "in hue_options: if is_menu_down_fn(): return CalibHue(early_quit=True) img_frame, elapsed_time = camera_fn()", "\"Calibarating\\nservos\\n\\n\" \"Place ball in\\ncenter without\\n stand.\\n\\n\" \"Click joystick\\nto continue.\", scrolling=True,", "is_menu_down_fn(): return CalibHue(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected, ((x, y),", "Early quit hardware.go_up() return hardware.display(\"Calibrating\\nservos...\", scrolling=True) servo_calib = calibrate_servo_offsets(pid_fn, env)", "pos_calib = calibrate_pos(camera_fn, detector_fn, hue_calib.hue, 
is_menu_down) if pos_calib.early_quit: hardware.go_up() return", "a state bug detector_fn(img_frame, hue=hue_calib.hue + 1, debug=True, filename=filename) hardware.go_up()", "joy, _, _ = hardware.get_buttons() if menu or joy: break", "up first hardware.set_angles(0, 0) # Display message and wait for", "steps are even img_frame, elapsed_time = camera_fn() hue_options = list(np.linspace(hue_low,", "return CalibHue(hue=int(avg_hue), success=True) else: log.warning(f\"Hue calibration failed.\") return CalibHue() def", "to [0, 360] print(f\"Hues are: {detected_hues}\") print(f\"Hue calibrated: {avg_hue:0.2f}\") print(f\"Avg", "time.time() < start_time + time_limit: state = env.step(action) action, info", "so that the steps are even img_frame, elapsed_time = camera_fn()", "radius) = detector_fn(img_frame, hue=hue, debug=True) # If we found a", "save the image of what the moab camera # sees", ") print(f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\") else: hardware.display( \"Calibration\\nfailed\\n\\nClick menu\\nto", "env.hardware.go_up() return wait_for_menu_and_stream def main(calibration_file, frequency=30, debug=True): pid_fn = pid_controller(frequency=frequency)", "return ( np.abs(x) < max_ball_dist and np.abs(y) < max_ball_dist and", "return hardware.display(\"Calibrating...\") hue_calib = calibrate_hue(camera_fn, detector_fn, is_menu_down) if hue_calib.early_quit: hardware.go_up()", "CalibHue() def calibrate_pos(camera_fn, detector_fn, hue, is_menu_down_fn): for i in range(10):", "hardware.go_up() return # Calibrate position pos_calib = calibrate_pos(camera_fn, detector_fn, hue_calib.hue,", "hardware.go_up() return # Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"ball_hue\"] =", "( np.abs(x) < max_ball_dist and np.abs(y) < max_ball_dist and radius", "the image of what the moab camera # sees (useful", "((x, y), radius) = detector_fn(img_frame, hue=hue) # If we found", "too far from the center and too small return (", 
"datetime import numpy as np import logging as log from", "+ 1) invalidates # the cache. TODO: added this while", "open(calibration_file, \"w+\") as outfile: log.info(f\"Creating calibration file {calibration_file}\") json.dump(calibration_dict, outfile,", "Save to streaming menu, joy, _, _ = hardware.get_buttons() if", "from controllers import pid_controller from dataclasses import dataclass, astuple from", "stabilized in time_limit seconds, quit log.warning(f\"Servo calibration failed.\") return CalibServos()", "ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue) # If we", "= np.array(plate_angles_to_servo_positions(0, 0)) servo_offsets = list(servos - servos_zeroed) return CalibServos(servos=servo_offsets,", "Warning! This mutates the state! env.reset_calibration(calibration_file=calibration_file) if servo_calib.success: hardware.display( f\"servo", "sleep_time=1 / 30): while True: menu_button, joy_button, joy_x, joy_y =", "and servo_calib.success: hardware.display(f\"Ok! 
Ball hue={hue_calib.hue}\\nClick menu...\", scrolling=True) elif not (pos_calib.success", "{calibration_file}\") json.dump(calibration_dict, outfile, indent=4, sort_keys=True) def read_calibration(calibration_file=\"bot.json\"): log.info(\"Reading previous calibration.\")", "dataclass, astuple from hardware import plate_angles_to_servo_positions @dataclass class CalibHue: hue:", "current stable position and subtracting the `default` zeroed # position", "len(detected_hues) > 0: # https://en.wikipedia.org/wiki/Mean_of_circular_quantities detected_hues_rad = np.radians(detected_hues) sines, cosines", "return...\\n\", scrolling=True, ) print(f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\") else: hardware.display(", "write_calibration(calibration_dict) # Update the environment to use the new calibration", "radius > min_ball_dist ) def calibrate_hue(camera_fn, detector_fn, is_menu_down_fn): hue_low =", "buttons = hardware.get_buttons() if buttons.menu_button or buttons.joy_button: return buttons time.sleep(sleep_time)", "detector_fn, hue, is_menu_down_fn): for i in range(10): # Try and", "calibration.\") # write out stuff with open(calibration_file, \"w+\") as outfile:", "Copyright (c) Microsoft Corporation. 
# Licensed under the MIT License.", "_, _ = hardware.get_buttons() if menu or joy: break env.hardware.go_up()", "ball in\\ncenter using\\nclear stand.\\n\\n\" \"Click joystick\\nwhen ready.\" scrolling=True, ) buttons", "Parse command line args parser = argparse.ArgumentParser() parser.add_argument(\"-d\", \"--debug\", action=\"store_true\")", "if hue_calib.success else \"Hue calib:\\nfailed\\n\\n\" ) pos_str = ( f\"Position", "be stabilized in time_limit seconds, quit log.warning(f\"Servo calibration failed.\") return", "dataclasses import dataclass, astuple from hardware import plate_angles_to_servo_positions @dataclass class", "hue_str + pos_str + \"Click menu\\nto return...\\n\", scrolling=True, ) #", "(0.0, 0.0) success: bool = False early_quit: bool = False", "calib:\\nsuccessful\\nBall hue = {hue_calib.hue}\\n\\n\" if hue_calib.success else \"Hue calib:\\nfailed\\n\\n\" )", "calibration_dict def wait_for_joystick_or_menu(hardware, sleep_time=1 / 30): \"\"\"Waits for either the", "time and whether it succeeded or failed time_of_day = datetime.datetime.now().strftime(\"%H%M%S\")", "By sending a new hue (hue + 1) invalidates #", "average velocity for the last 100 timesteps is under the", "streaming menu, joy, _, _ = hardware.get_buttons() if menu or", "img_frame, _ = camera_fn() detector_fn(img_frame, debug=True) # Save to streaming", "calib works but doesn't currently give a good calibration raise", "import argparse import datetime import numpy as np import logging", "is complete def __iter__(self): return iter(astuple(self)) @dataclass class CalibServos: servos:", "Y) calib:\\nfailed\\n\\n\" ) hardware.display( \"Calibration\\npartially succeeded\\n\\n\" + hue_str + pos_str", "cv2 import time import json import argparse import datetime import", "detector import hsv_detector from controllers import pid_controller from dataclasses import", "stuff with open(calibration_file, \"w+\") as outfile: log.info(f\"Creating calibration file {calibration_file}\")", 
"{detected_hues}\") print(f\"Hue calibrated: {avg_hue:0.2f}\") print(f\"Avg hue: {avg_hue:0.2f}\") return CalibHue(hue=int(avg_hue), success=True)", "filename=filename) hardware.go_up() def run_servo_calibration(env, pid_fn, calibration_file): # Warning: servo calib", "# If the plate could be stabilized in time_limit seconds,", "f\"Position \\ncalib:\\nsuccessful\\nPosition = \\n({100*x_offset:.1f}, {100*y_offset:.1f})cm\\n\\n\" if hue_calib.success else \"(X, Y)", "if (prev_100_x < stationary_vel) and (prev_100_y < stationary_vel): # Calculate", "(prev_100_y < stationary_vel): # Calculate offsets by calculating servo positions", "the state! env.reset_calibration(calibration_file=calibration_file) if servo_calib.success: hardware.display( f\"servo offsets =\\n({s1:.2f}, {s2:.2f},", "log from env import MoabEnv from typing import Tuple from", "y_offset = pos_calib.position write_calibration(calibration_dict) # Update the environment to use", "= { \"ball_hue\": 44, \"plate_offsets\": (0.0, 0.0), \"servo_offsets\": (0.0, 0.0,", "= read_calibration(calibration_file) calibration_dict[\"servo_offsets\"] = servo_calib.servos s1, s2, s3 = servo_calib.servos", "# If menu is pressed before the calibration is complete", "list(np.linspace(hue_low, hue_high, hue_steps)) detected_hues = [] for hue in hue_options:", "vel_y_hist = [1.0 for _ in range(100)] # Run until", "calibration # Warning! This mutates the state! env.reset_calibration(calibration_file=calibration_file) if servo_calib.success:", "time.sleep(0.2) env.hardware.enable_servos() time.sleep(0.2) env.hardware.set_servos(133, 133, 133) run_calibration(env, pid_fn, calibration_file) env.hardware.disable_servos()", "last 100 timesteps is under the limit if (prev_100_x <", "__iter__(self): return iter(astuple(self)) def ball_close_enough(x, y, radius, max_ball_dist=0.045, min_ball_dist=0.01): #", "pressed before the calibration is complete def __iter__(self): return iter(astuple(self))", "cache. 
By sending a new hue (hue + 1) invalidates", "Calibrate servo offsets hardware.display( \"Calibarating\\nservos\\n\\n\" \"Place ball in\\ncenter without\\n stand.\\n\\n\"", "# Update the environment to use the new calibration #", "kwargs[\"env\"] hardware = env.hardware camera_fn = hardware.camera detector_fn = hardware.detector", "scrolling=True) else: hue_str = ( f\"Hue calib:\\nsuccessful\\nBall hue = {hue_calib.hue}\\n\\n\"", "outfile: log.info(f\"Creating calibration file {calibration_file}\") json.dump(calibration_dict, outfile, indent=4, sort_keys=True) def", "@dataclass class CalibServos: servos: Tuple[float, float, float] = (0.0, 0.0,", "# Display message and wait for joystick hardware.display( \"put ball", "# Calibrate position pos_calib = calibrate_pos(camera_fn, detector_fn, hue_calib.hue, is_menu_down) if", "calibration_dict[\"servo_offsets\"] = servo_calib.servos s1, s2, s3 = servo_calib.servos write_calibration(calibration_dict) #", "time.time() action = Vector2(0, 0) # Initial high vel_history (to", "hardware.display( \"Calibarating\\nservos\\n\\n\" \"Place ball in\\ncenter without\\n stand.\\n\\n\" \"Click joystick\\nto continue.\",", "CalibServos(early_quit=True) if ball_detected: vel_x_hist.append(vel_x) vel_y_hist.append(vel_y) prev_100_x = np.mean(np.abs(vel_x_hist[-100:])) prev_100_y =", "\"/tmp/hue\" if hue_calib.success: filename += f\".{hue_calib.hue}.{time_of_day}.jpg\" else: filename += f\".fail.{time_of_day}.jpg\"", "kwargs[\"pid_fn\"], kwargs[\"calibration_file\"], ) def wait_for_menu_and_stream(): # Get some hidden things", "and subtracting the `default` zeroed # position of the servos.", "failed time_of_day = datetime.datetime.now().strftime(\"%H%M%S\") filename = \"/tmp/hue\" if hue_calib.success: filename", "use the new calibration # Warning! 
This mutates the state!", "def main(calibration_file, frequency=30, debug=True): pid_fn = pid_controller(frequency=frequency) with MoabEnv(frequency=frequency, debug=debug)", "joystick\", # \"Place ball in\\ncenter using\\nclear stand.\\n\\n\" \"Click joystick\\nwhen ready.\"", "mutates the state! hardware.reset_calibration(calibration_file=calibration_file) if pos_calib.success and hue_calib.success: # and", "= state # Quit on menu down if buttons.menu_button: return", "calibration.\") if os.path.isfile(calibration_file): with open(calibration_file, \"r\") as f: calibration_dict =", "Use defaults calibration_dict = { \"ball_hue\": 44, \"plate_offsets\": (0.0, 0.0),", "0)) time.sleep(0.2) env.hardware.enable_servos() time.sleep(0.2) env.hardware.set_servos(133, 133, 133) run_calibration(env, pid_fn, calibration_file)", "hardware.display( f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\\n\\n\" \"Click menu\\nto return...\\n\", scrolling=True,", "import json import argparse import datetime import numpy as np", "the plate could be stabilized in time_limit seconds, quit log.warning(f\"Servo", "Display message and wait for joystick hardware.display( \"put ball on", "# Save calibration calibration_dict = read_calibration(calibration_file) calibration_dict[\"ball_hue\"] = hue_calib.hue calibration_dict[\"plate_offsets\"]", "ball_detected={ball_detected}, \" f\"(x, y)={x:0.3f} {y:0.3f}, radius={radius:0.3f}\" ) detected_hues.append(hue) if len(detected_hues)", "= 41 # Is 41 instead of 40 so that", "at the # current stable position and subtracting the `default`", "return buttons time.sleep(sleep_time) def wait_for_menu(hardware, sleep_time=1 / 30): while True:", "servo_calib.servos s1, s2, s3 = servo_calib.servos write_calibration(calibration_dict) # Update the", "roughly in the center that is large enough if ball_detected", "Corporation. # Licensed under the MIT License. 
\"\"\" Calibration Controller", "servos_zeroed) return CalibServos(servos=servo_offsets, success=True) # If the plate could be", "- servos_zeroed) return CalibServos(servos=servo_offsets, success=True) # If the plate could", "\\ncalib:\\nsuccessful\\nPosition = \\n({100*x_offset:.1f}, {100*y_offset:.1f})cm\\n\\n\" if hue_calib.success else \"(X, Y) calib:\\nfailed\\n\\n\"", "hardware.display( \"Calibration\\nfailed\\n\\nClick menu\\nto return...\", scrolling=True ) hardware.go_up() def calibrate_controller(**kwargs): run_calibration(", "calib results env = kwargs[\"env\"] hardware = env.hardware camera_fn =", "give a good calibration raise NotImplementedError # Get some hidden", "prev_100_x = np.mean(np.abs(vel_x_hist[-100:])) prev_100_y = np.mean(np.abs(vel_y_hist[-100:])) print(\"Prev 100: \", (prev_100_x,", "env.hardware camera_fn = hardware.camera detector_fn = hardware.detector # Start the", "log.info(\"Writing calibration.\") # write out stuff with open(calibration_file, \"w+\") as", "y, radius): x_offset = round(x, 3) y_offset = round(y, 3)", "success=True) log.warning(f\"Offset calibration failed.\") return CalibPos() def calibrate_servo_offsets(pid_fn, env, stationary_vel=0.005,", "calibration_dict = json.load(f) else: # Use defaults calibration_dict = {", "=\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\\n\\n\" \"Click menu\\nto return...\\n\", scrolling=True, ) print(f\"servo offsets", "file {calibration_file}\") json.dump(calibration_dict, outfile, indent=4, sort_keys=True) def read_calibration(calibration_file=\"bot.json\"): log.info(\"Reading previous", "= np.arctan2(sin_mean, cos_mean) avg_hue = np.degrees(avg_hue_rad) % 360 # Convert", "scrolling=True) elif not (pos_calib.success or hue_calib.success): # or servo_calib.success): hardware.display(\"Calibration", "env.hardware.set_servos(133, 133, 133) run_calibration(env, pid_fn, calibration_file) env.hardware.disable_servos() if __name__ ==", ") def calibrate_hue(camera_fn, detector_fn, is_menu_down_fn): hue_low 
= 0 hue_high =", "= pos_calib.position x_offset, y_offset = pos_calib.position write_calibration(calibration_dict) # Update the", "new calibration # Warning! This mutates the state! env.reset_calibration(calibration_file=calibration_file) if", "wait_for_joystick_or_menu(hardware) if buttons.menu_button: # Early quit hardware.go_up() return hardware.display(\"Calibrating\\nservos...\", scrolling=True)", "else: log.warning(f\"Hue calibration failed.\") return CalibHue() def calibrate_pos(camera_fn, detector_fn, hue,", "np import logging as log from env import MoabEnv from", "calibration is complete def __iter__(self): return iter(astuple(self)) @dataclass class CalibPos:", "Convert back to [0, 360] print(f\"Hues are: {detected_hues}\") print(f\"Hue calibrated:", "up if is_menu_down_fn(): return CalibPos(early_quit=True) img_frame, elapsed_time = camera_fn() ball_detected,", "= camera_fn() ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue, debug=True)", "= env.hardware camera_fn = hardware.camera detector_fn = hardware.detector menu_button =", "hue_calib.success: filename += f\".{hue_calib.hue}.{time_of_day}.jpg\" else: filename += f\".fail.{time_of_day}.jpg\" img_frame, _", "wait_for_menu_and_stream def main(calibration_file, frequency=30, debug=True): pid_fn = pid_controller(frequency=frequency) with MoabEnv(frequency=frequency,", "100: \", (prev_100_x, prev_100_y)) # If the average velocity for", "center and too small return ( np.abs(x) < max_ball_dist and", "filename += f\".fail.{time_of_day}.jpg\" img_frame, _ = camera_fn() # Huemask keeps", "44 # Reasonable default success: bool = False early_quit: bool", "wait_for_joystick_or_menu(hardware) if buttons.menu_button: # Early quit hardware.go_up() return hardware.display(\"Calibrating...\") hue_calib", "y, radius): log.info( f\"hue={hue:0.3f}, ball_detected={ball_detected}, \" f\"(x, y)={x:0.3f} {y:0.3f}, radius={radius:0.3f}\"", "print(\"Prev 100: \", (prev_100_x, prev_100_y)) # If the average 
velocity", "def __iter__(self): return iter(astuple(self)) @dataclass class CalibServos: servos: Tuple[float, float,", "ball has stabilized or the time limit was reached while", "or buttons.joy_button: return buttons time.sleep(sleep_time) def wait_for_menu(hardware, sleep_time=1 / 30):", "def __iter__(self): return iter(astuple(self)) def ball_close_enough(x, y, radius, max_ball_dist=0.045, min_ball_dist=0.01):", "for i in range(10): # Try and detect for 10", "np.radians(detected_hues) sines, cosines = np.sin(detected_hues_rad), np.cos(detected_hues_rad) sin_mean, cos_mean = np.mean(sines),", "hue_calib.early_quit: hardware.go_up() return # Calibrate position pos_calib = calibrate_pos(camera_fn, detector_fn,", "of what the moab camera # sees (useful for debugging", "or failed time_of_day = datetime.datetime.now().strftime(\"%H%M%S\") filename = \"/tmp/hue\" if hue_calib.success:", "if buttons.menu_button: # Early quit hardware.go_up() return hardware.display(\"Calibrating\\nservos...\", scrolling=True) servo_calib", "menu or joy: break env.hardware.go_up() return wait_for_menu_and_stream def main(calibration_file, frequency=30,", "def calibrate_controller(**kwargs): run_calibration( kwargs[\"env\"], kwargs[\"pid_fn\"], kwargs[\"calibration_file\"], ) def wait_for_menu_and_stream(): #", "too small return ( np.abs(x) < max_ball_dist and np.abs(y) <", "and np.abs(y) < max_ball_dist and radius > min_ball_dist ) def", "numpy as np import logging as log from env import", "calibrated: {avg_hue:0.2f}\") print(f\"Avg hue: {avg_hue:0.2f}\") return CalibHue(hue=int(avg_hue), success=True) else: log.warning(f\"Hue", "hue_calib.success): # or servo_calib.success): hardware.display(\"Calibration failed\\nClick menu...\", scrolling=True) else: hue_str", "env.hardware.disable_servos() if __name__ == \"__main__\": # Parse command line args", "= False # If menu is pressed before the calibration", "y)={x:0.3f} {y:0.3f}, radius={radius:0.3f}\" ) detected_hues.append(hue) if 
len(detected_hues) > 0: #", "calibrate_pos(camera_fn, detector_fn, hue, is_menu_down_fn): for i in range(10): # Try", "{ \"ball_hue\": 44, \"plate_offsets\": (0.0, 0.0), \"servo_offsets\": (0.0, 0.0, 0.0),", "buttons\"\"\" while True: buttons = hardware.get_buttons() if buttons.menu_button or buttons.joy_button:", "stand\\nclick joystick\", # \"Place ball in\\ncenter using\\nclear stand.\\n\\n\" \"Click joystick\\nwhen", "= servo_calib.servos write_calibration(calibration_dict) # Update the environment to use the", "MoabEnv(frequency=frequency, debug=debug) as env: env.step((0, 0)) time.sleep(0.2) env.hardware.enable_servos() time.sleep(0.2) env.hardware.set_servos(133,", "run_calibration(env, pid_fn, calibration_file) env.hardware.disable_servos() if __name__ == \"__main__\": # Parse", "ready.\" scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware) if buttons.menu_button: # Early", "@dataclass class CalibPos: position: Tuple[float, float] = (0.0, 0.0) success:", "# Licensed under the MIT License. \"\"\" Calibration Controller Performs", "calibration failed.\") return CalibPos() def calibrate_servo_offsets(pid_fn, env, stationary_vel=0.005, time_limit=20): start_time", "failed.\") return CalibServos() def write_calibration(calibration_dict, calibration_file=\"bot.json\"): log.info(\"Writing calibration.\") # write", "return calibration_dict def wait_for_joystick_or_menu(hardware, sleep_time=1 / 30): \"\"\"Waits for either", "joystick or the menu. 
Returns the buttons\"\"\" while True: buttons", "the time and whether it succeeded or failed time_of_day =", "detector_fn(img_frame, hue=hue_calib.hue + 1, debug=True, filename=filename) hardware.go_up() def run_servo_calibration(env, pid_fn,", "menu down if buttons.menu_button: return CalibServos(early_quit=True) if ball_detected: vel_x_hist.append(vel_x) vel_y_hist.append(vel_y)", "the hue calibration fails) # Have a nice filename with", "cosines = np.sin(detected_hues_rad), np.cos(detected_hues_rad) sin_mean, cos_mean = np.mean(sines), np.mean(cosines) avg_hue_rad", "= False early_quit: bool = False # If menu is", "large enough if ball_detected and ball_close_enough(x, y, radius): log.info( f\"hue={hue:0.3f},", "servo offsets hardware.display( \"Calibarating\\nservos\\n\\n\" \"Place ball in\\ncenter without\\n stand.\\n\\n\" \"Click", "When the calibration is complete, save the image of what", "def calibrate_hue(camera_fn, detector_fn, is_menu_down_fn): hue_low = 0 hue_high = 360", "np.mean(np.abs(vel_y_hist[-100:])) print(\"Prev 100: \", (prev_100_x, prev_100_y)) # If the average", "\"Place ball in\\ncenter without\\n stand.\\n\\n\" \"Click joystick\\nto continue.\", scrolling=True, )", "detector_fn = hardware.detector def is_menu_down(hardware=hardware) -> bool: return hardware.get_buttons().menu_button #", "continue.\", scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware) if buttons.menu_button: # Early", "the new calibration # Warning! This mutates the state! env.reset_calibration(calibration_file=calibration_file)", "(hue + 1) invalidates # the cache. 
TODO: added this", "things from env to be able to stream the calib", "time.sleep(sleep_time) def wait_for_menu(hardware, sleep_time=1 / 30): while True: menu_button, joy_button,", "print(f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\") else: hardware.display( \"Calibration\\nfailed\\n\\nClick menu\\nto return...\",", "in\\ncenter without\\n stand.\\n\\n\" \"Click joystick\\nto continue.\", scrolling=True, ) buttons =", "__name__ == \"__main__\": # Parse command line args parser =", "44, \"plate_offsets\": (0.0, 0.0), \"servo_offsets\": (0.0, 0.0, 0.0), } return", "new calibration # Warning! This mutates the state! hardware.reset_calibration(calibration_file=calibration_file) if", "int = 44 # Reasonable default success: bool = False", "s1, s2, s3 = servo_calib.servos write_calibration(calibration_dict) # Update the environment", "for debugging when the hue calibration fails) # Have a", "= camera_fn() # Huemask keeps an internal cache. By sending", "+ 1, debug=True, filename=filename) hardware.go_up() def run_servo_calibration(env, pid_fn, calibration_file): #", "hardware.set_angles(0, 0) # Calibrate servo offsets hardware.display( \"Calibarating\\nservos\\n\\n\" \"Place ball", ") pos_str = ( f\"Position \\ncalib:\\nsuccessful\\nPosition = \\n({100*x_offset:.1f}, {100*y_offset:.1f})cm\\n\\n\" if", "astuple from hardware import plate_angles_to_servo_positions @dataclass class CalibHue: hue: int", "some hidden things from env hardware = env.hardware camera_fn =", "calibration is complete def __iter__(self): return iter(astuple(self)) @dataclass class CalibServos:", "log.warning(f\"Offset calibration failed.\") return CalibPos() def calibrate_servo_offsets(pid_fn, env, stationary_vel=0.005, time_limit=20):", "Start the calibration with uncalibrated servos hardware.servo_offsets = (0, 0,", "action = Vector2(0, 0) # Initial high vel_history (to use", "True: buttons = hardware.get_buttons() if buttons.menu_button or buttons.joy_button: return buttons", "it succeeded 
or failed time_of_day = datetime.datetime.now().strftime(\"%H%M%S\") filename = \"/tmp/hue\"", "np.abs(y) < max_ball_dist and radius > min_ball_dist ) def calibrate_hue(camera_fn,", "nice filename with the time and whether it succeeded or", "1, debug=True, filename=filename) hardware.go_up() def run_servo_calibration(env, pid_fn, calibration_file): # Warning:", "but doesn't currently give a good calibration raise NotImplementedError #", "hue={hue_calib.hue}\\nClick menu...\", scrolling=True) elif not (pos_calib.success or hue_calib.success): # or", "menu\\nto return...\\n\", scrolling=True, ) print(f\"servo offsets =\\n({s1:.2f}, {s2:.2f}, {s3:.2f})\") else:", "first hardware.set_angles(0, 0) # Display message and wait for joystick", "log.info(f\"Offset calibrated: [{x_offset:.3f}, {y_offset:.3f}]\") return CalibPos(position=(x_offset, y_offset), success=True) log.warning(f\"Offset calibration", "buttons.menu_button: # Early quit hardware.go_up() return hardware.display(\"Calibrating...\") hue_calib = calibrate_hue(camera_fn,", "lift plate up first hardware.set_angles(0, 0) # Display message and", "return def run_calibration(env, pid_fn, calibration_file): # Get some hidden things", "if pos_calib.early_quit: hardware.go_up() return # Save calibration calibration_dict = read_calibration(calibration_file)", "if hue_calib.early_quit: hardware.go_up() return # Calibrate position pos_calib = calibrate_pos(camera_fn,", "( f\"Hue calib:\\nsuccessful\\nBall hue = {hue_calib.hue}\\n\\n\" if hue_calib.success else \"Hue", ") def wait_for_menu_and_stream(): # Get some hidden things from env", "range(100)] vel_y_hist = [1.0 for _ in range(100)] # Run", "return CalibServos(servos=servo_offsets, success=True) # If the plate could be stabilized", "log.info(f\"Creating calibration file {calibration_file}\") json.dump(calibration_dict, outfile, indent=4, sort_keys=True) def read_calibration(calibration_file=\"bot.json\"):", "= env.step(action) action, info = pid_fn(state) (x, y, 
vel_x, vel_y,", "= \\n({100*x_offset:.1f}, {100*y_offset:.1f})cm\\n\\n\" if hue_calib.success else \"(X, Y) calib:\\nfailed\\n\\n\" )", "Calibration Controller Performs calibration for hue, center of camera position,", "for 10 frames before giving up if is_menu_down_fn(): return CalibPos(early_quit=True)", "calibration is complete def __iter__(self): return iter(astuple(self)) def ball_close_enough(x, y,", "`default` zeroed # position of the servos. servos = np.array(plate_angles_to_servo_positions(*action))", "searching for a state bug detector_fn(img_frame, hue=hue_calib.hue + 1, debug=True,", "servo calib works but doesn't currently give a good calibration", "env.hardware camera_fn = hardware.camera detector_fn = hardware.detector menu_button = False", "elapsed_time = camera_fn() ball_detected, ((x, y), radius) = detector_fn(img_frame, hue=hue,", "the calib results env = kwargs[\"env\"] hardware = env.hardware camera_fn", "the last 100 timesteps is under the limit if (prev_100_x", "{hue_calib.hue}\\n\\n\" if hue_calib.success else \"Hue calib:\\nfailed\\n\\n\" ) pos_str = (", "high vel_history (to use the vel_hist[-100:] later) vel_x_hist = [1.0", "from typing import Tuple from common import Vector2 from detector", "[{x_offset:.3f}, {y_offset:.3f}]\") return CalibPos(position=(x_offset, y_offset), success=True) log.warning(f\"Offset calibration failed.\") return", "stand.\\n\\n\" \"Click joystick\\nwhen ready.\" scrolling=True, ) buttons = wait_for_joystick_or_menu(hardware) if", "= env.hardware camera_fn = hardware.camera detector_fn = hardware.detector def is_menu_down(hardware=hardware)", "succeeded or failed time_of_day = datetime.datetime.now().strftime(\"%H%M%S\") filename = \"/tmp/hue\" if", "else: hardware.display( \"Calibration\\nfailed\\n\\nClick menu\\nto return...\", scrolling=True ) hardware.go_up() def calibrate_controller(**kwargs):", "menu, joy, _, _ = hardware.get_buttons() if menu or joy:", "position of the servos. 
servos = np.array(plate_angles_to_servo_positions(*action)) servos_zeroed = np.array(plate_angles_to_servo_positions(0,", "hardware.reset_calibration(calibration_file=calibration_file) if pos_calib.success and hue_calib.success: # and servo_calib.success: hardware.display(f\"Ok! Ball", "if pos_calib.success and hue_calib.success: # and servo_calib.success: hardware.display(f\"Ok! Ball hue={hue_calib.hue}\\nClick", "the calibration is complete def __iter__(self): return iter(astuple(self)) def ball_close_enough(x,", "y_offset = round(y, 3) log.info(f\"Offset calibrated: [{x_offset:.3f}, {y_offset:.3f}]\") return CalibPos(position=(x_offset,", "return...\", scrolling=True ) hardware.go_up() def calibrate_controller(**kwargs): run_calibration( kwargs[\"env\"], kwargs[\"pid_fn\"], kwargs[\"calibration_file\"],", "while searching for a state bug detector_fn(img_frame, hue=hue_calib.hue + 1,", "133) run_calibration(env, pid_fn, calibration_file) env.hardware.disable_servos() if __name__ == \"__main__\": #", "hue=hue, debug=True) # If we found a ball roughly in", "np.mean(np.abs(vel_x_hist[-100:])) prev_100_y = np.mean(np.abs(vel_y_hist[-100:])) print(\"Prev 100: \", (prev_100_x, prev_100_y)) #", "or joy: break env.hardware.go_up() return wait_for_menu_and_stream def main(calibration_file, frequency=30, debug=True):", "= np.mean(sines), np.mean(cosines) avg_hue_rad = np.arctan2(sin_mean, cos_mean) avg_hue = np.degrees(avg_hue_rad)", "pos_calib.success and hue_calib.success: # and servo_calib.success: hardware.display(f\"Ok! Ball hue={hue_calib.hue}\\nClick menu...\",", "hue (hue + 1) invalidates # the cache. 
TODO: added", "default success: bool = False early_quit: bool = False #", "3) log.info(f\"Offset calibrated: [{x_offset:.3f}, {y_offset:.3f}]\") return CalibPos(position=(x_offset, y_offset), success=True) log.warning(f\"Offset", "= np.mean(np.abs(vel_x_hist[-100:])) prev_100_y = np.mean(np.abs(vel_y_hist[-100:])) print(\"Prev 100: \", (prev_100_x, prev_100_y))", "+ \"Click menu\\nto return...\\n\", scrolling=True, ) # When the calibration", "Get some hidden things from env to be able to" ]
[ "\"useremail.com\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\":", "name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, ] invalid_rows =", "timedelta from django.test import TestCase from django.test.utils import override_settings from", "\"first name\": \"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\",", "\"Hyderabad\", \"state\": \"Telangana\", }, { \"company name\": \"company_name_2\", \"email\": \"<EMAIL>\",", "self.assertEqual(\"SUCCESS\", task.state) task = send_scheduled_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = delete_multiple_contacts_tasks.apply(", "\"city\": \"Hyderabad\", \"state\": \"Telangana\", }, ] task = upload_csv_file.apply( (", "from marketing.tasks import ( delete_multiple_contacts_tasks, list_all_bounces_unsubscribes, run_all_campaigns, run_campaign, send_campaign_email_to_admin_contact, send_scheduled_campaigns,", "TestMarketingModel class TestCeleryTasks(TestMarketingModel, TestCase): @override_settings( CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_ALWAYS_EAGER=True, BROKER_BACKEND=\"memory\", ) def", "send_campaign_email_to_admin_contact, send_scheduled_campaigns, upload_csv_file, ) from marketing.tests import TestMarketingModel class TestCeleryTasks(TestMarketingModel,", "\"Telangana\", }, { \"company name\": \"company_name_2\", \"email\": \"user2@email\", \"first name\":", "[ { \"company name\": \"company_name_1\", \"email\": \"useremail.com\", \"first name\": \"first_name\",", "override_settings from marketing.tasks import ( delete_multiple_contacts_tasks, list_all_bounces_unsubscribes, run_all_campaigns, run_campaign, send_campaign_email_to_admin_contact,", "\"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, { \"company name\": \"company_name_2\",", "= datetime.now() self.campaign.save() task = run_all_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task 
=", "import ( delete_multiple_contacts_tasks, list_all_bounces_unsubscribes, run_all_campaigns, run_campaign, send_campaign_email_to_admin_contact, send_scheduled_campaigns, upload_csv_file, )", "django.test.utils import override_settings from marketing.tasks import ( delete_multiple_contacts_tasks, list_all_bounces_unsubscribes, run_all_campaigns,", "name\": \"company_name_1\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\",", "\"company name\": \"company_name_4\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\":", "task.state) valid_rows = [ { \"company name\": \"company_name_1\", \"email\": \"<EMAIL>\",", "{ \"company name\": \"company_name_3\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last", "invalid_rows = [ { \"company name\": \"company_name_1\", \"email\": \"useremail.com\", \"first", "list_all_bounces_unsubscribes.apply() self.assertEqual(\"SUCCESS\", task.state) task = send_scheduled_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task =", "self.assertEqual(\"SUCCESS\", task.state) task = list_all_bounces_unsubscribes.apply() self.assertEqual(\"SUCCESS\", task.state) task = send_scheduled_campaigns.apply()", "task.state) task = send_campaign_email_to_admin_contact.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) valid_rows =", "\"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, { \"company name\": \"company_name_3\",", "{ \"company name\": \"company_name_2\", \"email\": \"user2@email\", \"first name\": \"first_name\", \"last", "from marketing.tests import TestMarketingModel class TestCeleryTasks(TestMarketingModel, TestCase): @override_settings( CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_ALWAYS_EAGER=True,", "\"city\": \"Hyderabad\", \"state\": \"Telangana\", }, { \"company name\": \"company_name_2\", \"email\":", "marketing.tests import TestMarketingModel class TestCeleryTasks(TestMarketingModel, TestCase): 
@override_settings( CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_ALWAYS_EAGER=True, BROKER_BACKEND=\"memory\",", "\"city\": \"Hyderabad\", \"state\": \"Telangana\", }, ] invalid_rows = [ {", "\"city\": \"Hyderabad\", \"state\": \"Telangana\", }, { \"company name\": \"company_name_3\", \"email\":", ") self.assertEqual(\"SUCCESS\", task.state) self.campaign.schedule_date_time = datetime.now() self.campaign.save() task = run_all_campaigns.apply()", "self.assertEqual(\"SUCCESS\", task.state) valid_rows = [ { \"company name\": \"company_name_1\", \"email\":", "}, { \"company name\": \"company_name_2\", \"email\": \"user2@email\", \"first name\": \"first_name\",", "] task = upload_csv_file.apply( ( valid_rows, invalid_rows, self.user.id, [ self.contact_list.id,", "django.test import TestCase from django.test.utils import override_settings from marketing.tasks import", "\"Telangana\", }, { \"company name\": \"company_name_2\", \"email\": \"<EMAIL>\", \"first name\":", "= delete_multiple_contacts_tasks.apply( (self.contact_list.id,), ) self.assertEqual(\"SUCCESS\", task.state) task = send_campaign_email_to_admin_contact.apply( (self.campaign.id,),", ") self.assertEqual(\"SUCCESS\", task.state) self.campaign.reply_to_email = None self.campaign.save() task = run_campaign.apply(", "\"city\": \"Hyderabad\", \"state\": \"Telangana\", }, { \"company name\": \"company_name_4\", \"email\":", "\"company name\": \"company_name_1\", \"email\": \"useremail.com\", \"first name\": \"first_name\", \"last name\":", "TestCeleryTasks(TestMarketingModel, TestCase): @override_settings( CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_ALWAYS_EAGER=True, BROKER_BACKEND=\"memory\", ) def test_celery_tasks(self): task", "\"state\": \"Telangana\", }, { \"company name\": \"company_name_2\", \"email\": \"<EMAIL>\", \"first", "self.assertEqual(\"SUCCESS\", task.state) task = delete_multiple_contacts_tasks.apply( (self.contact_list.id,), ) self.assertEqual(\"SUCCESS\", task.state) task", 
"(self.contact_list.id,), ) self.assertEqual(\"SUCCESS\", task.state) task = send_campaign_email_to_admin_contact.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\",", "from datetime import datetime, timedelta from django.test import TestCase from", "\"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, ] task", "\"company name\": \"company_name_1\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\":", "\"Hyderabad\", \"state\": \"Telangana\", }, { \"company name\": \"company_name_2\", \"email\": \"user2@email\",", "\"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, ] task = upload_csv_file.apply(", "= list_all_bounces_unsubscribes.apply() self.assertEqual(\"SUCCESS\", task.state) task = send_scheduled_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task", "task = delete_multiple_contacts_tasks.apply( (self.contact_list.id,), ) self.assertEqual(\"SUCCESS\", task.state) task = send_campaign_email_to_admin_contact.apply(", "name\": \"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", },", "= run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.reply_to_email = None self.campaign.save()", "upload_csv_file.apply( ( valid_rows, invalid_rows, self.user.id, [ self.contact_list.id, ], self.company.id, ),", "\"state\": \"Telangana\", }, { \"company name\": \"company_name_3\", \"email\": \"<EMAIL>\", \"first", "CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_ALWAYS_EAGER=True, BROKER_BACKEND=\"memory\", ) def test_celery_tasks(self): task = run_campaign.apply( (self.campaign.id,),", "marketing.tasks import ( delete_multiple_contacts_tasks, list_all_bounces_unsubscribes, run_all_campaigns, run_campaign, send_campaign_email_to_admin_contact, send_scheduled_campaigns, upload_csv_file,", "task = send_scheduled_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = 
delete_multiple_contacts_tasks.apply( (self.contact_list.id,), )", "run_all_campaigns, run_campaign, send_campaign_email_to_admin_contact, send_scheduled_campaigns, upload_csv_file, ) from marketing.tests import TestMarketingModel", "\"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, { \"company name\": \"company_name_4\",", "self.campaign.save() task = run_all_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = list_all_bounces_unsubscribes.apply() self.assertEqual(\"SUCCESS\",", "TestCase): @override_settings( CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_ALWAYS_EAGER=True, BROKER_BACKEND=\"memory\", ) def test_celery_tasks(self): task =", "None self.campaign.save() task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.schedule_date_time", "task.state) self.campaign.schedule_date_time = datetime.now() self.campaign.save() task = run_all_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state)", "import override_settings from marketing.tasks import ( delete_multiple_contacts_tasks, list_all_bounces_unsubscribes, run_all_campaigns, run_campaign,", "\"Telangana\", }, { \"company name\": \"company_name_3\", \"email\": \"<EMAIL>\", \"first name\":", "self.campaign.schedule_date_time = datetime.now() self.campaign.save() task = run_all_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task", "= [ { \"company name\": \"company_name_1\", \"email\": \"useremail.com\", \"first name\":", "\"company_name_2\", \"email\": \"user2@email\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\":", "task.state) task = delete_multiple_contacts_tasks.apply( (self.contact_list.id,), ) self.assertEqual(\"SUCCESS\", task.state) task =", "{ \"company name\": \"company_name_2\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last", "name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, ] task =", "\"email\": \"user2@email\", \"first 
name\": \"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\",", "import datetime, timedelta from django.test import TestCase from django.test.utils import", "\"Hyderabad\", \"state\": \"Telangana\", }, ] invalid_rows = [ { \"company", "{ \"company name\": \"company_name_4\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last", "\"user2@email\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\":", "@override_settings( CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_ALWAYS_EAGER=True, BROKER_BACKEND=\"memory\", ) def test_celery_tasks(self): task = run_campaign.apply(", "\"email\": \"useremail.com\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\",", "delete_multiple_contacts_tasks, list_all_bounces_unsubscribes, run_all_campaigns, run_campaign, send_campaign_email_to_admin_contact, send_scheduled_campaigns, upload_csv_file, ) from marketing.tests", ") from marketing.tests import TestMarketingModel class TestCeleryTasks(TestMarketingModel, TestCase): @override_settings( CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,", "= send_scheduled_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = delete_multiple_contacts_tasks.apply( (self.contact_list.id,), ) self.assertEqual(\"SUCCESS\",", "name\": \"company_name_4\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\",", "send_scheduled_campaigns, upload_csv_file, ) from marketing.tests import TestMarketingModel class TestCeleryTasks(TestMarketingModel, TestCase):", "self.campaign.reply_to_email = None self.campaign.save() task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\",", "}, ] task = upload_csv_file.apply( ( valid_rows, invalid_rows, self.user.id, [", "valid_rows = [ { \"company name\": \"company_name_1\", \"email\": \"<EMAIL>\", \"first", "datetime, timedelta from django.test import TestCase from django.test.utils import 
override_settings", "\"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\",", "}, { \"company name\": \"company_name_4\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\",", "self.assertEqual(\"SUCCESS\", task.state) self.campaign.schedule_date_time = datetime.now() self.campaign.save() task = run_all_campaigns.apply() self.assertEqual(\"SUCCESS\",", "self.assertEqual(\"SUCCESS\", task.state) task = send_campaign_email_to_admin_contact.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) valid_rows", "= None self.campaign.save() task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state)", "task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.schedule_date_time = datetime.now()", "task.state) task = list_all_bounces_unsubscribes.apply() self.assertEqual(\"SUCCESS\", task.state) task = send_scheduled_campaigns.apply() self.assertEqual(\"SUCCESS\",", "self.campaign.save() task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.schedule_date_time =", "\"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\":", "datetime.now() self.campaign.save() task = run_all_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = list_all_bounces_unsubscribes.apply()", "}, { \"company name\": \"company_name_2\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\",", "upload_csv_file, ) from marketing.tests import TestMarketingModel class TestCeleryTasks(TestMarketingModel, TestCase): @override_settings(", "= run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.schedule_date_time = datetime.now() self.campaign.save()", "\"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, ] invalid_rows = [", "\"Hyderabad\", \"state\": \"Telangana\", }, { 
\"company name\": \"company_name_4\", \"email\": \"<EMAIL>\",", "( delete_multiple_contacts_tasks, list_all_bounces_unsubscribes, run_all_campaigns, run_campaign, send_campaign_email_to_admin_contact, send_scheduled_campaigns, upload_csv_file, ) from", "= send_campaign_email_to_admin_contact.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) valid_rows = [ {", "\"company_name_3\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\":", "valid_rows, invalid_rows, self.user.id, [ self.contact_list.id, ], self.company.id, ), ) self.assertEqual(\"SUCCESS\",", "class TestCeleryTasks(TestMarketingModel, TestCase): @override_settings( CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_ALWAYS_EAGER=True, BROKER_BACKEND=\"memory\", ) def test_celery_tasks(self):", "task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.reply_to_email = None", "task = list_all_bounces_unsubscribes.apply() self.assertEqual(\"SUCCESS\", task.state) task = send_scheduled_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state)", "\"company_name_1\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\":", "task.state) task = send_scheduled_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = delete_multiple_contacts_tasks.apply( (self.contact_list.id,),", "\"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, { \"company", "\"Telangana\", }, { \"company name\": \"company_name_4\", \"email\": \"<EMAIL>\", \"first name\":", "\"company name\": \"company_name_3\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\":", "run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.reply_to_email = None self.campaign.save() task", "= run_all_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = list_all_bounces_unsubscribes.apply() 
self.assertEqual(\"SUCCESS\", task.state) task", "datetime import datetime, timedelta from django.test import TestCase from django.test.utils", "from django.test import TestCase from django.test.utils import override_settings from marketing.tasks", "\"state\": \"Telangana\", }, { \"company name\": \"company_name_2\", \"email\": \"user2@email\", \"first", ") self.assertEqual(\"SUCCESS\", task.state) task = send_campaign_email_to_admin_contact.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state)", "test_celery_tasks(self): task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.reply_to_email =", "task = send_campaign_email_to_admin_contact.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) valid_rows = [", "send_campaign_email_to_admin_contact.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) valid_rows = [ { \"company", "name\": \"company_name_2\", \"email\": \"user2@email\", \"first name\": \"first_name\", \"last name\": \"last_name\",", "\"state\": \"Telangana\", }, ] task = upload_csv_file.apply( ( valid_rows, invalid_rows,", ") self.assertEqual(\"SUCCESS\", task.state) valid_rows = [ { \"company name\": \"company_name_1\",", "}, { \"company name\": \"company_name_3\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\",", ") def test_celery_tasks(self): task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state)", "from django.test.utils import override_settings from marketing.tasks import ( delete_multiple_contacts_tasks, list_all_bounces_unsubscribes,", "[ { \"company name\": \"company_name_1\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\",", "import TestCase from django.test.utils import override_settings from marketing.tasks import (", "] invalid_rows = [ { \"company name\": \"company_name_1\", \"email\": \"useremail.com\",", "\"company name\": \"company_name_2\", \"email\": \"<EMAIL>\", \"first name\": 
\"first_name\", \"last name\":", "( valid_rows, invalid_rows, self.user.id, [ self.contact_list.id, ], self.company.id, ), )", "{ \"company name\": \"company_name_1\", \"email\": \"useremail.com\", \"first name\": \"first_name\", \"last", "}, ] invalid_rows = [ { \"company name\": \"company_name_1\", \"email\":", "import TestMarketingModel class TestCeleryTasks(TestMarketingModel, TestCase): @override_settings( CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_ALWAYS_EAGER=True, BROKER_BACKEND=\"memory\", )", "name\": \"company_name_1\", \"email\": \"useremail.com\", \"first name\": \"first_name\", \"last name\": \"last_name\",", "list_all_bounces_unsubscribes, run_all_campaigns, run_campaign, send_campaign_email_to_admin_contact, send_scheduled_campaigns, upload_csv_file, ) from marketing.tests import", "run_all_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = list_all_bounces_unsubscribes.apply() self.assertEqual(\"SUCCESS\", task.state) task =", "\"company name\": \"company_name_2\", \"email\": \"user2@email\", \"first name\": \"first_name\", \"last name\":", "(self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.schedule_date_time = datetime.now() self.campaign.save() task =", "def test_celery_tasks(self): task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.reply_to_email", "send_scheduled_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = delete_multiple_contacts_tasks.apply( (self.contact_list.id,), ) self.assertEqual(\"SUCCESS\", task.state)", "name\": \"company_name_3\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\",", "self.assertEqual(\"SUCCESS\", task.state) self.campaign.reply_to_email = None self.campaign.save() task = run_campaign.apply( (self.campaign.id,),", "\"Telangana\", }, ] invalid_rows = [ { \"company name\": \"company_name_1\",", "name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": 
\"Telangana\", }, { \"company name\":", "\"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, {", "invalid_rows, self.user.id, [ self.contact_list.id, ], self.company.id, ), ) self.assertEqual(\"SUCCESS\", task.state)", "\"company_name_1\", \"email\": \"useremail.com\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\":", "delete_multiple_contacts_tasks.apply( (self.contact_list.id,), ) self.assertEqual(\"SUCCESS\", task.state) task = send_campaign_email_to_admin_contact.apply( (self.campaign.id,), )", "run_campaign, send_campaign_email_to_admin_contact, send_scheduled_campaigns, upload_csv_file, ) from marketing.tests import TestMarketingModel class", "task = run_all_campaigns.apply() self.assertEqual(\"SUCCESS\", task.state) task = list_all_bounces_unsubscribes.apply() self.assertEqual(\"SUCCESS\", task.state)", "task.state) self.campaign.reply_to_email = None self.campaign.save() task = run_campaign.apply( (self.campaign.id,), )", "{ \"company name\": \"company_name_1\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last", "run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.schedule_date_time = datetime.now() self.campaign.save() task", "\"Hyderabad\", \"state\": \"Telangana\", }, { \"company name\": \"company_name_3\", \"email\": \"<EMAIL>\",", "\"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, ] invalid_rows", "\"Hyderabad\", \"state\": \"Telangana\", }, ] task = upload_csv_file.apply( ( valid_rows,", "\"Telangana\", }, ] task = upload_csv_file.apply( ( valid_rows, invalid_rows, self.user.id,", "\"company_name_4\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\":", "= upload_csv_file.apply( ( valid_rows, invalid_rows, self.user.id, [ self.contact_list.id, ], self.company.id,", "TestCase from django.test.utils import override_settings from marketing.tasks 
import ( delete_multiple_contacts_tasks,", "BROKER_BACKEND=\"memory\", ) def test_celery_tasks(self): task = run_campaign.apply( (self.campaign.id,), ) self.assertEqual(\"SUCCESS\",", "(self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) valid_rows = [ { \"company name\":", "\"company_name_2\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\", \"city\":", "name\": \"company_name_2\", \"email\": \"<EMAIL>\", \"first name\": \"first_name\", \"last name\": \"last_name\",", "\"state\": \"Telangana\", }, ] invalid_rows = [ { \"company name\":", "\"state\": \"Telangana\", }, { \"company name\": \"company_name_4\", \"email\": \"<EMAIL>\", \"first", "CELERY_ALWAYS_EAGER=True, BROKER_BACKEND=\"memory\", ) def test_celery_tasks(self): task = run_campaign.apply( (self.campaign.id,), )", "\"first_name\", \"last name\": \"last_name\", \"city\": \"Hyderabad\", \"state\": \"Telangana\", }, ]", "task = upload_csv_file.apply( ( valid_rows, invalid_rows, self.user.id, [ self.contact_list.id, ],", "(self.campaign.id,), ) self.assertEqual(\"SUCCESS\", task.state) self.campaign.reply_to_email = None self.campaign.save() task =", "= [ { \"company name\": \"company_name_1\", \"email\": \"<EMAIL>\", \"first name\":" ]
[ "* page = web() page.create() # Header Parameters # text", "Parameters # text = label text # color = label", "header text # n = title level page.header(text='My Site', n=1)", "title level page.header(text='My Site', n=1) # Label Parameters # text", "# Header Parameters # text = header text # n", "Parameters # text = header text # n = title", "page = web() page.create() # Header Parameters # text =", "# Label Parameters # text = label text # color", "= web() page.create() # Header Parameters # text = header", "Header Parameters # text = header text # n =", "text = header text # n = title level page.header(text='My", "Site', n=1) # Label Parameters # text = label text", "= label text # color = label color page.label(text='', color='')", "label text # color = label color page.label(text='', color='') page.compile()", "page.header(text='My Site', n=1) # Label Parameters # text = label", "level page.header(text='My Site', n=1) # Label Parameters # text =", "<filename>doc's/3-labels_and_titles.py<gh_stars>0 from py2html.main import * page = web() page.create() #", "= header text # n = title level page.header(text='My Site',", "text # n = title level page.header(text='My Site', n=1) #", "import * page = web() page.create() # Header Parameters #", "page.create() # Header Parameters # text = header text #", "from py2html.main import * page = web() page.create() # Header", "web() page.create() # Header Parameters # text = header text", "# text = header text # n = title level", "n = title level page.header(text='My Site', n=1) # Label Parameters", "n=1) # Label Parameters # text = label text #", "Label Parameters # text = label text # color =", "# n = title level page.header(text='My Site', n=1) # Label", "# text = label text # color = label color", "= title level page.header(text='My Site', n=1) # Label Parameters #", "text = label text # color = label color page.label(text='',", "py2html.main import * page = web() page.create() # Header Parameters" ]
[ "= ''.join(options('elements')) self.assertEqual(elements,'Fe$') ref = Reference(options('reference')+\"/POSCAR\") self.assertEqual(ref(),0) self.assertEqual(options('number_of_interactions'),1) pickerUpper =", "-D _VASP/Fe/flip00000 -E Fe -J1 -U mRy\".split(\" \") options =", "options = TestPickupIron.options(*_input) elements = ''.join(options('elements')) self.assertEqual(elements,'Fe$') ref = Reference(options('reference')+\"/POSCAR\")", "\"test -R _VASP/Fe/noFlip -D _VASP/Fe/flip00000 -E Fe -J1 -U mRy\".split(\"", "_input = \"test -R _VASP/Fe/noFlip -D _VASP/Fe/flip00000 -E Fe -J1", "TestPickupIron(unittest.TestCase): @staticmethod def options(*args): return CommandLineOptions(*args) def test_iron_001(self): _input =", "CommandLineOptions(*args) def test_iron_001(self): _input = \"test -R _VASP/Fe/noFlip -D _VASP/Fe/flip00000", "\") options = TestPickupIron.options(*_input) elements = ''.join(options('elements')) self.assertEqual(elements,'Fe$') ref =", "= Reference(options('reference')+\"/POSCAR\") self.assertEqual(ref(),0) self.assertEqual(options('number_of_interactions'),1) pickerUpper = SmartPickUp(options('number_of_interactions'),elements) pickerUpper.read(options('reference'),*options('directories'),reference=ref()) self.assertEqual(options('units'),'mRy') _J_ij", "ref = Reference(options('reference')+\"/POSCAR\") self.assertEqual(ref(),0) self.assertEqual(options('number_of_interactions'),1) pickerUpper = SmartPickUp(options('number_of_interactions'),elements) pickerUpper.read(options('reference'),*options('directories'),reference=ref()) self.assertEqual(options('units'),'mRy')", "pickerUpper = SmartPickUp(options('number_of_interactions'),elements) pickerUpper.read(options('reference'),*options('directories'),reference=ref()) self.assertEqual(options('units'),'mRy') _J_ij = pickerUpper.solve(units=options('units')).flatten() self.assertEqual(_J_ij[0],1.1861042008301703) self.assertEqual(_J_ij[1],4.157645364906014)", "from JorGpi.pickup.pickup 
import SmartPickUp,Reference,CommandLineOptions class TestPickupIron(unittest.TestCase): @staticmethod def options(*args): return", "options(*args): return CommandLineOptions(*args) def test_iron_001(self): _input = \"test -R _VASP/Fe/noFlip", "def options(*args): return CommandLineOptions(*args) def test_iron_001(self): _input = \"test -R", "= TestPickupIron.options(*_input) elements = ''.join(options('elements')) self.assertEqual(elements,'Fe$') ref = Reference(options('reference')+\"/POSCAR\") self.assertEqual(ref(),0)", "-R _VASP/Fe/noFlip -D _VASP/Fe/flip00000 -E Fe -J1 -U mRy\".split(\" \")", "test_iron_001(self): _input = \"test -R _VASP/Fe/noFlip -D _VASP/Fe/flip00000 -E Fe", "import SmartPickUp,Reference,CommandLineOptions class TestPickupIron(unittest.TestCase): @staticmethod def options(*args): return CommandLineOptions(*args) def", "self.assertEqual(elements,'Fe$') ref = Reference(options('reference')+\"/POSCAR\") self.assertEqual(ref(),0) self.assertEqual(options('number_of_interactions'),1) pickerUpper = SmartPickUp(options('number_of_interactions'),elements) pickerUpper.read(options('reference'),*options('directories'),reference=ref())", "-J1 -U mRy\".split(\" \") options = TestPickupIron.options(*_input) elements = ''.join(options('elements'))", "@staticmethod def options(*args): return CommandLineOptions(*args) def test_iron_001(self): _input = \"test", "_VASP/Fe/noFlip -D _VASP/Fe/flip00000 -E Fe -J1 -U mRy\".split(\" \") options", "-U mRy\".split(\" \") options = TestPickupIron.options(*_input) elements = ''.join(options('elements')) self.assertEqual(elements,'Fe$')", "''.join(options('elements')) self.assertEqual(elements,'Fe$') ref = Reference(options('reference')+\"/POSCAR\") self.assertEqual(ref(),0) self.assertEqual(options('number_of_interactions'),1) pickerUpper = SmartPickUp(options('number_of_interactions'),elements)", "TestPickupIron.options(*_input) elements = ''.join(options('elements')) self.assertEqual(elements,'Fe$') ref = 
Reference(options('reference')+\"/POSCAR\") self.assertEqual(ref(),0) self.assertEqual(options('number_of_interactions'),1)", "import unittest from JorGpi.pickup.pickup import SmartPickUp,Reference,CommandLineOptions class TestPickupIron(unittest.TestCase): @staticmethod def", "_VASP/Fe/flip00000 -E Fe -J1 -U mRy\".split(\" \") options = TestPickupIron.options(*_input)", "def test_iron_001(self): _input = \"test -R _VASP/Fe/noFlip -D _VASP/Fe/flip00000 -E", "class TestPickupIron(unittest.TestCase): @staticmethod def options(*args): return CommandLineOptions(*args) def test_iron_001(self): _input", "Fe -J1 -U mRy\".split(\" \") options = TestPickupIron.options(*_input) elements =", "unittest from JorGpi.pickup.pickup import SmartPickUp,Reference,CommandLineOptions class TestPickupIron(unittest.TestCase): @staticmethod def options(*args):", "JorGpi.pickup.pickup import SmartPickUp,Reference,CommandLineOptions class TestPickupIron(unittest.TestCase): @staticmethod def options(*args): return CommandLineOptions(*args)", "Reference(options('reference')+\"/POSCAR\") self.assertEqual(ref(),0) self.assertEqual(options('number_of_interactions'),1) pickerUpper = SmartPickUp(options('number_of_interactions'),elements) pickerUpper.read(options('reference'),*options('directories'),reference=ref()) self.assertEqual(options('units'),'mRy') _J_ij =", "mRy\".split(\" \") options = TestPickupIron.options(*_input) elements = ''.join(options('elements')) self.assertEqual(elements,'Fe$') ref", "SmartPickUp,Reference,CommandLineOptions class TestPickupIron(unittest.TestCase): @staticmethod def options(*args): return CommandLineOptions(*args) def test_iron_001(self):", "return CommandLineOptions(*args) def test_iron_001(self): _input = \"test -R _VASP/Fe/noFlip -D", "-E Fe -J1 -U mRy\".split(\" \") options = TestPickupIron.options(*_input) elements", "elements = ''.join(options('elements')) self.assertEqual(elements,'Fe$') ref = Reference(options('reference')+\"/POSCAR\") 
self.assertEqual(ref(),0) self.assertEqual(options('number_of_interactions'),1) pickerUpper", "self.assertEqual(ref(),0) self.assertEqual(options('number_of_interactions'),1) pickerUpper = SmartPickUp(options('number_of_interactions'),elements) pickerUpper.read(options('reference'),*options('directories'),reference=ref()) self.assertEqual(options('units'),'mRy') _J_ij = pickerUpper.solve(units=options('units')).flatten()", "self.assertEqual(options('number_of_interactions'),1) pickerUpper = SmartPickUp(options('number_of_interactions'),elements) pickerUpper.read(options('reference'),*options('directories'),reference=ref()) self.assertEqual(options('units'),'mRy') _J_ij = pickerUpper.solve(units=options('units')).flatten() self.assertEqual(_J_ij[0],1.1861042008301703)", "= \"test -R _VASP/Fe/noFlip -D _VASP/Fe/flip00000 -E Fe -J1 -U" ]
[ "from django.contrib import admin from django.contrib.auth import get_user_model User =", "<reponame>alaraayan/todo-backend<filename>jwt_auth/admin.py from django.contrib import admin from django.contrib.auth import get_user_model User", "import admin from django.contrib.auth import get_user_model User = get_user_model() admin.site.register(User)", "django.contrib import admin from django.contrib.auth import get_user_model User = get_user_model()" ]
[ "inner, mid or outer spot\", ) spot_ids[spot[\"id\"]] = 2 self.assertEquals(", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 12, }, ) self.assertEquals( response.status_code,", "Client() response = c.get(\"/api/v1/spot\", {}) self.assertEquals( response.status_code, 200, \"Accepts a", "response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": \"bad_data\", \"distance\":", ") self.assertEquals( response.status_code, 200, \"Accepts a query with bad height\"", "or outer spot\", ) spot_ids[spot[\"id\"]] = 2 self.assertEquals( spot_ids[inner_left.pk], 2,", "or mid spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing limits", "distance\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" )", "def test_no_params(self): c = Client() response = c.get(\"/api/v1/spot\", {}) self.assertEquals(", "\"Mid left was selected\") self.assertEquals(spot_ids[mid_right.pk], 2, \"Mid rightwas selected\") self.assertEquals(spot_ids[mid_top.pk],", "\"center_longitude\": center_long, \"distance\": 110, \"limit\": 0, }, ) self.assertEquals( response.status_code,", "\"Accepts a query with bad longitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", "selected\") self.assertEquals(spot_ids[mid_top.pk], 2, \"Mid top was selected\") self.assertEquals( spot_ids[mid_bottom.pk], 2,", "\"Found 8 far out spots to fill in the limit", "height\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" )", "mock import patch from spotseeker_server import models @override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\") class SpotSearchDistanceTest(TestCase):", "self.assertEquals(len(spots), 4, \"Returns 4 spots\") spot_ids = { inner_left.pk: 1,", "= Spot.objects.create( name=\"Outer Top\", latitude=Decimal(\"30.0008983153\"), longitude=Decimal(\"-40.0\"), ) outer_top.save() outer_bottom =", "= { inner_left.pk: 1, 
inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk: 1,", "1, outer_bottom.pk: 1, } for spot in spots: self.assertEquals( spot_ids[spot[\"id\"]],", "University of Washington # SPDX-License-Identifier: Apache-2.0 from django.test import TestCase", "\"bad_data\", \"center_longitude\": -40, \"distance\": 10, }, ) self.assertEquals( response.status_code, 200,", "longitude=Decimal(\"-40.0\"), ) outer_bottom.save() outer_left = Spot.objects.create( name=\"Outer Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0010372851\"),", "1, outer_right.pk: 1, outer_top.pk: 1, outer_bottom.pk: 1, } far_out_count =", "c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 130, \"limit\":", "= 30.000000 center_long = -40.000000 # Inner spots are 10", "inner_top.pk: 1, inner_bottom.pk: 1, mid_left.pk: 1, mid_right.pk: 1, mid_top.pk: 1,", "header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 6, \"Returns 6 spots\")", "selected\") self.assertEquals( spot_ids[mid_bottom.pk], 2, \"Mid bottom was selected\" ) #", "= c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": \"bad_data\", \"distance\": \"10\",", "\"/api/v1/spot\", {\"center_latitude\": 40, \"center_longitude\": -190, \"distance\": 10}, ) self.assertEquals( response.status_code,", "\"Should return no matches\" ) def test_invalid_longitude(self): c = Client()", "matches\" ) def test_large_longitude(self): c = Client() response = c.get(", "mid spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing the outer", "matches a unique inner, mid or outer spot\", ) spot_ids[spot[\"id\"]]", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 110, \"limit\": 0,", "outside in, so things that sort by # primary key", "query\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" )", "20 spaces response = c.get( \"/api/v1/spot\", { \"center_latitude\": 
center_lat, \"center_longitude\":", "150, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance query\"", "1, outer_top.pk: 1, outer_bottom.pk: 1, } far_out_count = 0 for", "4 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk:", "matches\" ) def test_invalid_longitude(self): c = Client() response = c.get(", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 110, }, )", "\"Returns 8 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1,", "inner_left.pk: 1, inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk: 1, } for", "outer_right.pk: 1, outer_top.pk: 1, outer_bottom.pk: 1, } far_out_count = 0", "longitude=Decimal(\"-39.9998962715\"), ) inner_right.save() # Testing to make sure too small", "mid_right.save() inner_top = Spot.objects.create( name=\"Inner Top\", latitude=Decimal(\"30.0000898315\"), longitude=Decimal(\"-40.0\"), ) inner_top.save()", ") outer_right.save() mid_top = Spot.objects.create( name=\"Mid Top\", latitude=Decimal(\" 30.0004491576\"), longitude=Decimal(\"-40.0\"),", "spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing the outer ring", "self.assertEquals( spot_ids[inner_bottom.pk], 2, \"Inner bottom was selected\" ) # Testing", "response.status_code, 200, \"Accepts a query with too negative longitude\", )", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 1, }, ) self.assertEquals( response.status_code,", "101, \"limit\": 8, }, ) self.assertEquals( response.status_code, 200, \"Accepts the", "spot_ids: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner, mid", "a unique inner, mid or outer spot\", ) else: far_out_count", "latitude=Decimal(\"29.9999101685\"), longitude=Decimal(\"-40.0\"), ) inner_bottom.save() inner_left = Spot.objects.create( name=\"Inner Left\", latitude=Decimal(\"30.0\"),", "self.assertEquals( response.status_code, 200, \"Accepts a query with too negative latitude\",", "\"Accepts a 
query with too negative longitude\", ) self.assertEquals( response[\"Content-Type\"],", "Spot.objects.create( name=\"Far Out %s\" % i, latitude=Decimal(\"30.0010779783\"), longitude=Decimal(\"-40.0\"), ) far_out.save()", "2 # Testing limits - should get all inner and", "was selected\") self.assertEquals( spot_ids[mid_bottom.pk], 2, \"Mid bottom was selected\" )", "c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 150, },", "\"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": \"-40\", \"distance\": \"bad_data\", }, )", "30, \"center_longitude\": 190, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts", "self.assertEquals( response.status_code, 200, \"Accepts a query with no params\" )", "or outer spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing that", "import TestCase from django.conf import settings from django.test.client import Client", "self.assertEquals( response.status_code, 200, \"Accepts a query with bad longitude\" )", "c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": \"-40\", \"distance\": \"bad_data\", },", "that sort by # primary key will give bad results", "spot\", ) spot_ids[spot[\"id\"]] = 2 # testing a limit -", "\"[]\", \"Should return no matches\" ) def test_large_negative_longitude(self): c =", "response.status_code, 200, \"Accepts a query with too negative latitude\", )", "= Spot.objects.create( name=\"Mid Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9994813574\"), ) mid_right.save() inner_top =", "spot_ids[spot[\"id\"]] = 2 # testing a limit - should get", "that the default limit is 20 spaces response = c.get(", "c.get(\"/api/v1/spot\", {}) self.assertEquals( response.status_code, 200, \"Accepts a query with no", "mid_left.save() mid_right = Spot.objects.create( name=\"Mid Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9994813574\"), ) mid_right.save()", "for spot in spots: if 
spot[\"id\"] in spot_ids: self.assertEquals( spot_ids[spot[\"id\"]],", "inner ring response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\":", "{ inner_left.pk: 1, inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk: 1, }", "self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def test_invalid_distance(self):", "spots = json.loads(response.content) self.assertEquals(len(spots), 4, \"Returns 4 spots\") spot_ids =", "\"distance\": 10, }, ) self.assertEquals( response.status_code, 200, \"Accepts a query", "\"Returns 12 spots with a limit of 0\") spot_ids =", "meters away from the center # Far out spots are", ") inner_bottom.save() inner_left = Spot.objects.create( name=\"Inner Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0001037285\"), )", "all inner and mid spots, and # 2 outer spots", ") self.assertEquals(spot_ids[mid_left.pk], 2, \"Mid left was selected\") self.assertEquals(spot_ids[mid_right.pk], 2, \"Mid", "mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk: 1, outer_left.pk: 1, outer_right.pk: 1,", "= no limit - get all 12 spots response =", "will give bad results for things that should be #", "c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 110, \"limit\":", "center_long, \"distance\": 60, }, ) self.assertEquals( response.status_code, 200, \"Accepts the", "Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0010372851\"), ) outer_left.save() outer_right = Spot.objects.create( name=\"Outer Right\",", "spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk:", "\"Should return no matches\" ) def test_large_latitude(self): c = Client()", "of 0\" ) spot_ids = { inner_left.pk: 1, inner_right.pk: 1,", "\"[]\", \"Should return no matches\" ) def test_invalid_longitude(self): c =", "atlantic to make them less likely to collide # with", "\"Should return no matches\" ) def test_no_params(self): c = 
Client()", "\"center_longitude\": center_long, \"distance\": 101, \"limit\": 8, }, ) self.assertEquals( response.status_code,", "c.get( \"/api/v1/spot\", {\"center_latitude\": 100, \"center_longitude\": -40, \"distance\": 10}, ) self.assertEquals(", "= c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": \"-40\", \"distance\": \"bad_data\",", "by distance for i in range(0, 100): far_out = Spot.objects.create(", "self.assertEquals(len(spots), 10, \"Returns 10 spots\") spot_ids = { inner_left.pk: 1,", "= c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 150,", "a query with bad longitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "no matches\" ) def test_distances(self): # Spots are in the", "spots with a limit of 0\") spot_ids = { inner_left.pk:", "8 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk:", "test_invalid_height(self): c = Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "\"center_latitude\": \"30\", \"center_longitude\": \"bad_data\", \"distance\": \"10\", }, ) self.assertEquals( response.status_code,", "name=\"Mid Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9994813574\"), ) mid_right.save() inner_top = Spot.objects.create( name=\"Inner", "101, \"limit\": 10, }, ) self.assertEquals( response.status_code, 200, \"Accepts the", "\"Has the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 8,", "20 spots with no defined limit\" ) spot_ids = {", "to collide # with actual spots center_lat = 30.000000 center_long", "the limit of 20\", ) # Testing that with a", "1, outer_top.pk: 1, outer_bottom.pk: 1, } for spot in spots:", "def test_large_negative_latitude(self): c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\":", "\"Accepts a query with too large latitude\", ) self.assertEquals( response[\"Content-Type\"],", "2, \"Inner left was 
selected\" ) self.assertEquals( spot_ids[inner_right.pk], 2, \"Inner", "= Spot.objects.create( name=\"Inner Bottom\", latitude=Decimal(\"29.9999101685\"), longitude=Decimal(\"-40.0\"), ) inner_bottom.save() inner_left =", "mid spots, and # 2 outer spots response = c.get(", "key will give bad results for things that should be", "self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def test_large_longitude(self):", "response = c.get(\"/api/v1/spot\", {}) self.assertEquals( response.status_code, 200, \"Accepts a query", "# Mid spots are 50 meters away from the center", "= c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 60,", "\"10\", }, ) self.assertEquals( response.status_code, 200, \"Accepts a query with", "outer_left.pk: 1, outer_right.pk: 1, outer_top.pk: 1, outer_bottom.pk: 1, } for", "self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" ) self.assertEquals( response.content.decode(),", "in, so things that sort by # primary key will", "%s\" % i, latitude=Decimal(\"30.0010779783\"), longitude=Decimal(\"-40.0\"), ) far_out.save() outer_top = Spot.objects.create(", "mid_top = Spot.objects.create( name=\"Mid Top\", latitude=Decimal(\" 30.0004491576\"), longitude=Decimal(\"-40.0\"), ) mid_top.save()", "name=\"Inner Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9998962715\"), ) inner_right.save() # Testing to make", "c = Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"bad_data\",", "longitude=Decimal(\"-40.0\"), ) outer_top.save() outer_bottom = Spot.objects.create( name=\"Outer Bottom\", latitude=Decimal(\"29.9991016847\"), longitude=Decimal(\"-40.0\"),", "Testing limits - should get all inner and mid spots,", ") # Testing that limit 0 = no limit -", "outer_top.pk: 1, outer_bottom.pk: 1, } for spot in spots: self.assertEquals(", "self.assertEquals( spot_ids[inner_left.pk], 2, \"Inner left was 
selected\" ) self.assertEquals( spot_ids[inner_right.pk],", "def test_invalid_distance(self): c = Client() response = c.get( \"/api/v1/spot\", {", "spot\", ) spot_ids[spot[\"id\"]] = 2 self.assertEquals( spot_ids[inner_left.pk], 2, \"Inner left", "json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 4, \"Returns 4", ") # Testing that with a limit of 0, we", "with a limit of 0, we pull in all spots", ") def test_invalid_distance(self): c = Client() response = c.get( \"/api/v1/spot\",", "\"Returns 4 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1,", "was selected\") self.assertEquals(spot_ids[mid_right.pk], 2, \"Mid rightwas selected\") self.assertEquals(spot_ids[mid_top.pk], 2, \"Mid", "a query with too negative longitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", "far out spots to fill in the limit of 20\",", "in all spots in range response = c.get( \"/api/v1/spot\", {", "{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 60, \"limit\": 6, },", "in spot_ids: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner,", "longitude=Decimal(\"-40.0\"), ) inner_bottom.save() inner_left = Spot.objects.create( name=\"Inner Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0001037285\"),", "too small of a radius returns nothing c = Client()", "ring response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long,", "\"center_longitude\": center_long, \"distance\": 12, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "= Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\":", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 60, \"limit\": 6, }, )", "longitude=Decimal(\"-40.0\"), ) mid_top.save() mid_bottom = Spot.objects.create( name=\"Mid Bottom\", latitude=Decimal(\"29.9995508424\"), 
longitude=Decimal(\"-40.0\"),", "spots to fill in the limit of 20\", ) #", "mid_left.pk: 1, mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk: 1, outer_left.pk: 1,", "bad latitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9989627149\"), ) outer_right.save() mid_top = Spot.objects.create( name=\"Mid Top\",", "outer spot\", ) else: far_out_count += 1 self.assertEquals(far_out_count, 100, \"Found", "far_out_count += 1 self.assertEquals( far_out_count, 8, \"Found 8 far out", "Top\", latitude=Decimal(\"30.0000898315\"), longitude=Decimal(\"-40.0\"), ) inner_top.save() inner_bottom = Spot.objects.create( name=\"Inner Bottom\",", "20, \"Returns 20 spots with no defined limit\" ) spot_ids", "the distance query\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "no matches\" ) def test_invalid_longitude(self): c = Client() response =", "2 outer spots response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat,", "\"/api/v1/spot\", {\"center_latitude\": 30, \"center_longitude\": 190, \"distance\": 10}, ) self.assertEquals( response.status_code,", "django.test.client import Client from spotseeker_server.models import Spot import simplejson as", "json.loads(response.content) self.assertEquals(len(spots), 10, \"Returns 10 spots\") spot_ids = { inner_left.pk:", "returns nothing c = Client() response = c.get( \"/api/v1/spot\", {", "a query with bad distance\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "self.assertEquals( response.status_code, 200, \"Accepts a query with no matches\" )", "= 0 for spot in spots: if spot[\"id\"] in spot_ids:", "nothing c = Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner spot\" )", "spots = json.loads(response.content) self.assertEquals(len(spots), 12, 
\"Returns 12 spots with a", "self.assertEquals( response.status_code, 200, \"Accepts the distance query\" ) self.assertEquals( response[\"Content-Type\"],", "\"Inner top was selected\") self.assertEquals( spot_ids[inner_bottom.pk], 2, \"Inner bottom was", "the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 8, \"Returns", "of the mid response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat,", "\"distance\": 1, }, ) self.assertEquals( response.status_code, 200, \"Accepts a query", "center # Outer spots are 100 meters away from the", "limit of 0, we pull in all spots in range", "= Spot.objects.create( name=\"Outer Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0010372851\"), ) outer_left.save() outer_right =", "% i, latitude=Decimal(\"30.0010779783\"), longitude=Decimal(\"-40.0\"), ) far_out.save() outer_top = Spot.objects.create( name=\"Outer", "spots = json.loads(response.content) self.assertEquals(len(spots), 10, \"Returns 10 spots\") spot_ids =", "mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk: 1, } for spot in", "import models @override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\") class SpotSearchDistanceTest(TestCase): def test_invalid_latitude(self): c = Client()", "c.get( \"/api/v1/spot\", {\"center_latitude\": 40, \"center_longitude\": -190, \"distance\": 10}, ) self.assertEquals(", "self.assertEquals( response.status_code, 200, \"Accepts a query with too negative longitude\",", "c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 60, },", "\"center_longitude\": \"bad_data\", \"distance\": \"10\", }, ) self.assertEquals( response.status_code, 200, \"Accepts", "get all 12 spots response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "2, \"Inner bottom was selected\" ) # Testing limits -", "}, ) self.assertEquals( response.status_code, 200, \"Accepts a query with no", ") mid_bottom.save() mid_left = 
Spot.objects.create( name=\"Mid Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0005186426\"), )", "mid_right = Spot.objects.create( name=\"Mid Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9994813574\"), ) mid_right.save() inner_top", "latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0010372851\"), ) outer_left.save() outer_right = Spot.objects.create( name=\"Outer Right\", latitude=Decimal(\"30.0\"),", "inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk: 1, } for spot in", ") # Testing limits - should get all of the", "patch from spotseeker_server import models @override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\") class SpotSearchDistanceTest(TestCase): def test_invalid_latitude(self):", "1, \"Spot matches a unique inner spot\" ) spot_ids[spot[\"id\"]] =", "meters away from the center # Outer spots are 100", "latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0001037285\"), ) inner_left.save() inner_right = Spot.objects.create( name=\"Inner Right\", latitude=Decimal(\"30.0\"),", "200, \"Accepts a query with no params\" ) self.assertEquals( response[\"Content-Type\"],", "c = Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat,", "matches a unique inner, mid or outer spot\", ) else:", "latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" )", "Spot.objects.create( name=\"Inner Top\", latitude=Decimal(\"30.0000898315\"), longitude=Decimal(\"-40.0\"), ) inner_top.save() inner_bottom = Spot.objects.create(", "2, \"Inner bottom was selected\" ) self.assertEquals(spot_ids[mid_left.pk], 2, \"Mid left", "are 10 meters away from the center # Mid spots", "30.000000 center_long = -40.000000 # Inner spots are 10 meters", "Outer spots are 100 meters away from the center #", "center_lat, \"center_longitude\": center_long, \"distance\": 12, }, ) self.assertEquals( response.status_code, 200,", "no outer spots response = c.get( \"/api/v1/spot\", { 
\"center_latitude\": center_lat,", "\"30\", \"center_longitude\": -40, \"height_from_sea_level\": \"bad_data\", \"distance\": \"10\", }, ) self.assertEquals(", "self.assertEquals(len(spots), 12, \"Returns 12 spots with a limit of 0\")", "with too large longitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" ) spots =", "10, }, ) self.assertEquals( response.status_code, 200, \"Accepts a query with", "header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns 12 spots\")", "self.assertEquals(len(spots), 12, \"Returns 12 spots\") spot_ids = { inner_left.pk: 1,", "was selected\" ) self.assertEquals( spot_ids[inner_right.pk], 2, \"Inner right was selected\"", "spots are 120 meters away, at the north # Creating", "of Washington # SPDX-License-Identifier: Apache-2.0 from django.test import TestCase from", "inner_bottom.save() inner_left = Spot.objects.create( name=\"Inner Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0001037285\"), ) inner_left.save()", "unique inner, mid or outer spot\", ) spot_ids[spot[\"id\"]] = 2", "-40, \"height_from_sea_level\": \"bad_data\", \"distance\": \"10\", }, ) self.assertEquals( response.status_code, 200,", "self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def test_large_negative_longitude(self):", "200, \"Accepts a query with too large longitude\", ) self.assertEquals(", "and mid, but # no outer spots response = c.get(", "return no matches\" ) def test_distances(self): # Spots are in", "mid_left = Spot.objects.create( name=\"Mid Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0005186426\"), ) mid_left.save() mid_right", "test_large_longitude(self): c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 30,", "self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def 
test_large_latitude(self):", "{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 1, }, ) self.assertEquals(", "Client from spotseeker_server.models import Spot import simplejson as json from", "2, \"Inner top was selected\") self.assertEquals( spot_ids[inner_bottom.pk], 2, \"Inner bottom", "inner and mid spots, and # 2 outer spots response", "was selected\" ) # Testing that limit 0 = no", "\"Has the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 4,", "return no matches\" ) def test_large_latitude(self): c = Client() response", "distance for i in range(0, 100): far_out = Spot.objects.create( name=\"Far", "Spot.objects.create( name=\"Inner Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9998962715\"), ) inner_right.save() # Testing to", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 60, }, )", ") spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk: 1,", "self.assertEquals( far_out_count, 8, \"Found 8 far out spots to fill", "112 spots with a limit of 0\" ) spot_ids =", ") # Testing the inner ring response = c.get( \"/api/v1/spot\",", "130, \"limit\": 0, }, ) self.assertEquals( response.status_code, 200, \"Accepts the", "center_lat, \"center_longitude\": center_long, \"distance\": 150, }, ) self.assertEquals( response.status_code, 200,", "by # primary key will give bad results for things", "Washington # SPDX-License-Identifier: Apache-2.0 from django.test import TestCase from django.conf", "c.get( \"/api/v1/spot\", { \"center_latitude\": \"bad_data\", \"center_longitude\": -40, \"distance\": 10, },", "for i in range(0, 100): far_out = Spot.objects.create( name=\"Far Out", "query with bad height\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "Spot.objects.create( name=\"Inner Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0001037285\"), ) inner_left.save() inner_right = 
Spot.objects.create(", "response.content.decode(), \"[]\", \"Should return no matches\" ) def test_large_latitude(self): c", "mid_top.pk: 1, mid_bottom.pk: 1, outer_left.pk: 1, outer_right.pk: 1, outer_top.pk: 1,", "mid or outer spot\", ) spot_ids[spot[\"id\"]] = 2 # testing", "= Spot.objects.create( name=\"Outer Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9989627149\"), ) outer_right.save() mid_top =", "meters away, at the north # Creating these from the", "settings from django.test.client import Client from spotseeker_server.models import Spot import", "inner_bottom = Spot.objects.create( name=\"Inner Bottom\", latitude=Decimal(\"29.9999101685\"), longitude=Decimal(\"-40.0\"), ) inner_bottom.save() inner_left", ") spots = json.loads(response.content) self.assertEquals( len(spots), 20, \"Returns 20 spots", "\"[]\", \"Should return no matches\" ) # Testing the inner", "no matches\" ) def test_invalid_distance(self): c = Client() response =", "json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns 12 spots with a limit of", "matches\" ) def test_no_params(self): c = Client() response = c.get(\"/api/v1/spot\",", "mid, but # no outer spots response = c.get( \"/api/v1/spot\",", "Spot.objects.create( name=\"Outer Top\", latitude=Decimal(\"30.0008983153\"), longitude=Decimal(\"-40.0\"), ) outer_top.save() outer_bottom = Spot.objects.create(", "in spots: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner,", "spots = json.loads(response.content) self.assertEquals(len(spots), 8, \"Returns 8 spots\") spot_ids =", "center_long, \"distance\": 12, }, ) self.assertEquals( response.status_code, 200, \"Accepts the", "self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def test_no_params(self):", "inner_left = Spot.objects.create( name=\"Inner Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0001037285\"), ) inner_left.save() inner_right", "limit of 0\") spot_ids = { 
inner_left.pk: 1, inner_right.pk: 1,", "import simplejson as json from decimal import * from django.test.utils", ") self.assertEquals( spot_ids[inner_right.pk], 2, \"Inner right was selected\" ) self.assertEquals(spot_ids[inner_top.pk],", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 110, \"limit\": 0, }, )", "spots with no defined limit\" ) spot_ids = { inner_left.pk:", "limit of 20\", ) # Testing that with a limit", "or outer spot\", ) else: far_out_count += 1 self.assertEquals(far_out_count, 100,", "# Testing that limit 0 = no limit - get", ") self.assertEquals( response.status_code, 200, \"Accepts a query with bad longitude\"", "latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9989627149\"), ) outer_right.save() mid_top = Spot.objects.create( name=\"Mid Top\", latitude=Decimal(\"", "query with too negative latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "bottom was selected\" ) # Testing that limit 0 =", "far_out_count = 0 for spot in spots: if spot[\"id\"] in", "the mid response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\":", "200, \"Accepts a query with bad longitude\" ) self.assertEquals( response[\"Content-Type\"],", "inner_top.save() inner_bottom = Spot.objects.create( name=\"Inner Bottom\", latitude=Decimal(\"29.9999101685\"), longitude=Decimal(\"-40.0\"), ) inner_bottom.save()", "def test_invalid_longitude(self): c = Client() response = c.get( \"/api/v1/spot\", {", ") outer_left.save() outer_right = Spot.objects.create( name=\"Outer Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9989627149\"), )", ") mid_left.save() mid_right = Spot.objects.create( name=\"Mid Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9994813574\"), )", "the north # Creating these from the outside in, so", "small of a radius returns nothing c = Client() response", "6, }, ) self.assertEquals( response.status_code, 200, \"Accepts the 
distance query\"", "\"distance\": 110, \"limit\": 0, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long,", "# primary key will give bad results for things that", "\"bad_data\", \"distance\": \"10\", }, ) self.assertEquals( response.status_code, 200, \"Accepts a", "c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 60, \"limit\":", "header\" ) spots = json.loads(response.content) self.assertEquals( len(spots), 20, \"Returns 20", "away, at the north # Creating these from the outside", "inner 4, and any 2 of the mid response =", "SpotSearchDistanceTest(TestCase): def test_invalid_latitude(self): c = Client() response = c.get( \"/api/v1/spot\",", "1, \"Spot matches a unique inner or mid spot\", )", "left was selected\") self.assertEquals(spot_ids[mid_right.pk], 2, \"Mid rightwas selected\") self.assertEquals(spot_ids[mid_top.pk], 2,", "far_out_count, 8, \"Found 8 far out spots to fill in", "self.assertEquals( response.status_code, 200, \"Accepts a query with bad height\" )", "query with bad distance\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "spot_ids[inner_bottom.pk], 2, \"Inner bottom was selected\" ) # Testing limits", "of 20\", ) # Testing that with a limit of", "Mid spots are 50 meters away from the center #", "Top\", latitude=Decimal(\" 30.0004491576\"), longitude=Decimal(\"-40.0\"), ) mid_top.save() mid_bottom = Spot.objects.create( name=\"Mid", "a query with no params\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "a query with too large longitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", "json.loads(response.content) self.assertEquals(len(spots), 8, \"Returns 8 spots\") spot_ids = { inner_left.pk:", "or outer spot\", ) else: far_out_count += 1 self.assertEquals( 
far_out_count,", "10, \"Returns 10 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk:", "self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) # Testing", ") def test_large_latitude(self): c = Client() response = c.get( \"/api/v1/spot\",", "django.conf import settings from django.test.client import Client from spotseeker_server.models import", ") self.assertEquals( response.status_code, 200, \"Accepts a query with bad distance\"", ") else: far_out_count += 1 self.assertEquals( far_out_count, 8, \"Found 8", "10 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk:", ") self.assertEquals( response.status_code, 200, \"Accepts a query with no matches\"", "2, \"Inner right was selected\" ) self.assertEquals(spot_ids[inner_top.pk], 2, \"Inner top", "response.content.decode(), \"[]\", \"Should return no matches\" ) def test_invalid_longitude(self): c", "mid_left.pk: 1, mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk: 1, } for", ") spots = json.loads(response.content) self.assertEquals(len(spots), 10, \"Returns 10 spots\") spot_ids", "test_invalid_distance(self): c = Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "inner_bottom.pk: 1, } for spot in spots: self.assertEquals( spot_ids[spot[\"id\"]], 1,", "outer_bottom.save() outer_left = Spot.objects.create( name=\"Outer Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0010372851\"), ) outer_left.save()", "def test_large_longitude(self): c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\":", "self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def test_distances(self):", "from the center # Far out spots are 120 meters", ") self.assertEquals(spot_ids[inner_top.pk], 2, \"Inner top was selected\") self.assertEquals( spot_ids[inner_bottom.pk], 2,", "spot_ids[inner_right.pk], 2, \"Inner right was selected\" ) self.assertEquals(spot_ids[inner_top.pk], 2, \"Inner", "actual spots center_lat = 
30.000000 center_long = -40.000000 # Inner", "name=\"Outer Top\", latitude=Decimal(\"30.0008983153\"), longitude=Decimal(\"-40.0\"), ) outer_top.save() outer_bottom = Spot.objects.create( name=\"Outer", "outer_bottom.pk: 1, } for spot in spots: self.assertEquals( spot_ids[spot[\"id\"]], 1,", "get the inner 4, and any 2 of the mid", "response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": -40, \"height_from_sea_level\":", "\"Returns 12 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1,", "mid_bottom.save() mid_left = Spot.objects.create( name=\"Mid Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0005186426\"), ) mid_left.save()", ") spot_ids[spot[\"id\"]] = 2 # Testing the mid ring response", ") spot_ids[spot[\"id\"]] = 2 # Testing limits - should get", "TestCase from django.conf import settings from django.test.client import Client from", "Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"bad_data\", \"center_longitude\": -40,", "to make them less likely to collide # with actual", "to fill in the limit of 20\", ) # Testing", "= c.get(\"/api/v1/spot\", {}) self.assertEquals( response.status_code, 200, \"Accepts a query with", "center_lat = 30.000000 center_long = -40.000000 # Inner spots are", "self.assertEquals( len(spots), 112, \"Returns 112 spots with a limit of", "mid or outer spot\", ) else: far_out_count += 1 self.assertEquals(far_out_count,", "# with actual spots center_lat = 30.000000 center_long = -40.000000", "give bad results for things that should be # sorted", "response.content.decode(), \"[]\", \"Should return no matches\" ) def test_distances(self): #", "1, mid_left.pk: 1, mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk: 1, }", "outer spots response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\":", "30.0004491576\"), longitude=Decimal(\"-40.0\"), ) mid_top.save() mid_bottom = Spot.objects.create( name=\"Mid Bottom\", 
latitude=Decimal(\"29.9995508424\"),", "+= 1 self.assertEquals( far_out_count, 8, \"Found 8 far out spots", "spots in range response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat,", "too negative latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "\"Accepts a query with too negative latitude\", ) self.assertEquals( response[\"Content-Type\"],", "limit of 0\" ) spot_ids = { inner_left.pk: 1, inner_right.pk:", "json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns 12", "testing a limit - should get the inner 4, and", "override_settings from mock import patch from spotseeker_server import models @override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\")", "60, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance query\"", "# Testing that with a limit of 0, we pull", "\"Has the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 10,", "\"Accepts the distance query\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "no limit - get all 12 spots response = c.get(", "response.status_code, 200, \"Accepts a query with no params\" ) self.assertEquals(", "# Outer spots are 100 meters away from the center", "longitude=Decimal(\"-39.9989627149\"), ) outer_right.save() mid_top = Spot.objects.create( name=\"Mid Top\", latitude=Decimal(\" 30.0004491576\"),", "= Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": -100, \"center_longitude\": -40,", "name=\"Mid Bottom\", latitude=Decimal(\"29.9995508424\"), longitude=Decimal(\"-40.0\"), ) mid_bottom.save() mid_left = Spot.objects.create( name=\"Mid", "return no matches\" ) def test_large_negative_latitude(self): c = Client() response", "spot[\"id\"] in spot_ids: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique", "too large longitude\", ) self.assertEquals( response[\"Content-Type\"], 
\"application/json\", \"Has the json", "header\" ) self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" )", "self.assertEquals(len(spots), 8, \"Returns 8 spots\") spot_ids = { inner_left.pk: 1,", "far_out = Spot.objects.create( name=\"Far Out %s\" % i, latitude=Decimal(\"30.0010779783\"), longitude=Decimal(\"-40.0\"),", "1, mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk: 1, } for spot", "that should be # sorted by distance for i in", "out spots to fill in the limit of 20\", )", "spot\" ) spot_ids[spot[\"id\"]] = 2 # Testing the mid ring", "outer_left.save() outer_right = Spot.objects.create( name=\"Outer Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9989627149\"), ) outer_right.save()", "2 # testing a limit - should get the inner", "200, \"Accepts a query with bad distance\" ) self.assertEquals( response[\"Content-Type\"],", "self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def test_large_negative_latitude(self):", ") self.assertEquals( response.status_code, 200, \"Accepts a query with bad latitude\"", "are in the atlantic to make them less likely to", "default limit is 20 spaces response = c.get( \"/api/v1/spot\", {", "\"-40\", \"distance\": \"bad_data\", }, ) self.assertEquals( response.status_code, 200, \"Accepts a", "query with too negative longitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "the outer ring response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat,", "1, mid_left.pk: 1, mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk: 1, outer_left.pk:", "0 for spot in spots: if spot[\"id\"] in spot_ids: self.assertEquals(", "a limit of 0\" ) spot_ids = { inner_left.pk: 1,", "Testing that limit 0 = no limit - get all", "inner, mid or outer spot\", ) spot_ids[spot[\"id\"]] = 2 #", "Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": \"-40\",", "spot\", ) spot_ids[spot[\"id\"]] = 2 # 
Testing limits - should", "# Testing limits - should get all inner and mid", "center_lat, \"center_longitude\": center_long, \"distance\": 1, }, ) self.assertEquals( response.status_code, 200,", "the atlantic to make them less likely to collide #", "\"center_longitude\": center_long, \"distance\": 101, \"limit\": 10, }, ) self.assertEquals( response.status_code,", "self.assertEquals( response.status_code, 200, \"Accepts a query with bad distance\" )", "json.loads(response.content) self.assertEquals(len(spots), 4, \"Returns 4 spots\") spot_ids = { inner_left.pk:", "{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 60, }, ) self.assertEquals(", "spot\", ) else: far_out_count += 1 self.assertEquals( far_out_count, 8, \"Found", "from the outside in, so things that sort by #", "= 2 self.assertEquals( spot_ids[inner_left.pk], 2, \"Inner left was selected\" )", "a radius returns nothing c = Client() response = c.get(", "longitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" )", "from spotseeker_server.models import Spot import simplejson as json from decimal", "{\"center_latitude\": -100, \"center_longitude\": -40, \"distance\": 10}, ) self.assertEquals( response.status_code, 200,", "too large latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "1, \"Spot matches a unique inner, mid or outer spot\",", "\"distance\": 60, \"limit\": 6, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "center_lat, \"center_longitude\": center_long, \"distance\": 60, }, ) self.assertEquals( response.status_code, 200,", "self.assertEquals( spot_ids[inner_bottom.pk], 2, \"Inner bottom was selected\" ) self.assertEquals(spot_ids[mid_left.pk], 2,", "{\"center_latitude\": 40, \"center_longitude\": -190, \"distance\": 10}, ) self.assertEquals( response.status_code, 200,", "in spots: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique 
inner", "spotseeker_server import models @override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\") class SpotSearchDistanceTest(TestCase): def test_invalid_latitude(self): c =", "name=\"Mid Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0005186426\"), ) mid_left.save() mid_right = Spot.objects.create( name=\"Mid", "spots are 100 meters away from the center # Far", "mid_bottom.pk: 1, } for spot in spots: self.assertEquals( spot_ids[spot[\"id\"]], 1,", "110, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance query\"", "limits - should get all of the inner and mid,", "12, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance query\"", "longitude=Decimal(\"-40.0010372851\"), ) outer_left.save() outer_right = Spot.objects.create( name=\"Outer Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9989627149\"),", "mid_top.save() mid_bottom = Spot.objects.create( name=\"Mid Bottom\", latitude=Decimal(\"29.9995508424\"), longitude=Decimal(\"-40.0\"), ) mid_bottom.save()", "spots = json.loads(response.content) self.assertEquals( len(spots), 112, \"Returns 112 spots with", "top was selected\") self.assertEquals( spot_ids[inner_bottom.pk], 2, \"Inner bottom was selected\"", "12 spots response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\":", "6 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk:", "1, inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk: 1, mid_left.pk: 1, mid_right.pk:", ") spots = json.loads(response.content) self.assertEquals(len(spots), 6, \"Returns 6 spots\") spot_ids", "Spot.objects.create( name=\"Outer Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9989627149\"), ) outer_right.save() mid_top = Spot.objects.create(", "away from the center # Far out spots are 120", "inner, mid or outer spot\", ) else: far_out_count += 1", "= c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": 
center_long, \"distance\": 101,", "- should get the inner 4, and any 2 of", "\"center_longitude\": -190, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts a", "response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": \"-40\", \"distance\":", "8, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance query\"", ") spot_ids[spot[\"id\"]] = 2 # testing a limit - should", "mid ring response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\":", "self.assertEquals(spot_ids[mid_top.pk], 2, \"Mid top was selected\") self.assertEquals( spot_ids[mid_bottom.pk], 2, \"Mid", "away from the center # Mid spots are 50 meters", "a unique inner spot\" ) spot_ids[spot[\"id\"]] = 2 # Testing", "= Spot.objects.create( name=\"Far Out %s\" % i, latitude=Decimal(\"30.0010779783\"), longitude=Decimal(\"-40.0\"), )", "header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 8, \"Returns 8 spots\")", "spots = json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns 12 spots\") spot_ids =", "= Spot.objects.create( name=\"Inner Top\", latitude=Decimal(\"30.0000898315\"), longitude=Decimal(\"-40.0\"), ) inner_top.save() inner_bottom =", "with too negative longitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "any 2 of the mid response = c.get( \"/api/v1/spot\", {", "spot_ids[spot[\"id\"]] = 2 # Testing limits - should get all", "= json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns 12 spots with a limit", "len(spots), 20, \"Returns 20 spots with no defined limit\" )", "negative latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "latitude=Decimal(\"30.0008983153\"), longitude=Decimal(\"-40.0\"), ) outer_top.save() outer_bottom = Spot.objects.create( name=\"Outer Bottom\", latitude=Decimal(\"29.9991016847\"),", "= c.get( 
\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 110,", "self.assertEquals( response.status_code, 200, \"Accepts a query with too large longitude\",", "out spots are 120 meters away, at the north #", "1, inner_top.pk: 1, inner_bottom.pk: 1, } for spot in spots:", "\"center_longitude\": center_long, \"distance\": 110, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner spot\" ) spot_ids[spot[\"id\"]]", "in spots: if spot[\"id\"] in spot_ids: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot", "json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 8, \"Returns 8", "\"30\", \"center_longitude\": \"-40\", \"distance\": \"bad_data\", }, ) self.assertEquals( response.status_code, 200,", "\"Should return no matches\" ) def test_large_negative_longitude(self): c = Client()", "name=\"Outer Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0010372851\"), ) outer_left.save() outer_right = Spot.objects.create( name=\"Outer", "spot_ids[spot[\"id\"]] = 2 # Testing the mid ring response =", "spots with a limit of 0\" ) spot_ids = {", "2, \"Mid top was selected\") self.assertEquals( spot_ids[mid_bottom.pk], 2, \"Mid bottom", "response = c.get( \"/api/v1/spot\", {\"center_latitude\": -100, \"center_longitude\": -40, \"distance\": 10},", "response.status_code, 200, \"Accepts a query with too large longitude\", )", "-100, \"center_longitude\": -40, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts", "center # Far out spots are 120 meters away, at", "that with a limit of 0, we pull in all", "self.assertEquals( spot_ids[inner_right.pk], 2, \"Inner right was selected\" ) self.assertEquals(spot_ids[inner_top.pk], 2,", "spot_ids[inner_bottom.pk], 2, \"Inner bottom was selected\" ) self.assertEquals(spot_ids[mid_left.pk], 2, \"Mid", "json.loads(response.content) self.assertEquals(len(spots), 12, 
\"Returns 12 spots\") spot_ids = { inner_left.pk:", "center_long, \"distance\": 130, \"limit\": 0, }, ) self.assertEquals( response.status_code, 200,", "unique inner spot\" ) spot_ids[spot[\"id\"]] = 2 # Testing the", "bad longitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "no params\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "\"application/json\", \"Has the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots),", "Bottom\", latitude=Decimal(\"29.9991016847\"), longitude=Decimal(\"-40.0\"), ) outer_bottom.save() outer_left = Spot.objects.create( name=\"Outer Left\",", ") mid_top.save() mid_bottom = Spot.objects.create( name=\"Mid Bottom\", latitude=Decimal(\"29.9995508424\"), longitude=Decimal(\"-40.0\"), )", "with bad latitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "should be # sorted by distance for i in range(0,", "all 12 spots response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat,", "= 2 # Testing that the default limit is 20", "def test_large_latitude(self): c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\":", "\"height_from_sea_level\": \"bad_data\", \"distance\": \"10\", }, ) self.assertEquals( response.status_code, 200, \"Accepts", "}, ) self.assertEquals( response.status_code, 200, \"Accepts a query with bad", "spot in spots: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique", "\"Mid rightwas selected\") self.assertEquals(spot_ids[mid_top.pk], 2, \"Mid top was selected\") self.assertEquals(", "{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 130, \"limit\": 0, },", "center_long, \"distance\": 101, \"limit\": 8, }, ) self.assertEquals( response.status_code, 200,", ") spots = json.loads(response.content) self.assertEquals(len(spots), 8, \"Returns 8 spots\") spot_ids", "Client() 
response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": \"bad_data\",", "def test_large_negative_longitude(self): c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\":", "{ \"center_latitude\": \"bad_data\", \"center_longitude\": -40, \"distance\": 10, }, ) self.assertEquals(", "bad height\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner, mid or", "models @override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\") class SpotSearchDistanceTest(TestCase): def test_invalid_latitude(self): c = Client() response", "response = c.get( \"/api/v1/spot\", {\"center_latitude\": 40, \"center_longitude\": -190, \"distance\": 10},", "self.assertEquals( response.status_code, 200, \"Accepts a query with too large latitude\",", "response.status_code, 200, \"Accepts a query with bad height\" ) self.assertEquals(", "north # Creating these from the outside in, so things", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 60, \"limit\": 6,", ") def test_large_negative_longitude(self): c = Client() response = c.get( \"/api/v1/spot\",", "spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk: 1,", "too negative longitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing that the default", "{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 150, }, ) self.assertEquals(", "bad results for things that should be # sorted by", "results for things that should be # sorted by distance", "in the limit of 20\", ) # Testing that with", "center_lat, \"center_longitude\": center_long, \"distance\": 101, \"limit\": 10, }, ) self.assertEquals(", "\"Has the json header\" ) spots = json.loads(response.content) 
self.assertEquals(len(spots), 12,", "latitude=Decimal(\"29.9995508424\"), longitude=Decimal(\"-40.0\"), ) mid_bottom.save() mid_left = Spot.objects.create( name=\"Mid Left\", latitude=Decimal(\"30.0\"),", "\"application/json\", \"Has the json header\" ) self.assertEquals( response.content.decode(), \"[]\", \"Should", "{\"center_latitude\": 100, \"center_longitude\": -40, \"distance\": 10}, ) self.assertEquals( response.status_code, 200,", "Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0005186426\"), ) mid_left.save() mid_right = Spot.objects.create( name=\"Mid Right\",", "1, outer_left.pk: 1, outer_right.pk: 1, outer_top.pk: 1, outer_bottom.pk: 1, }", "meters away from the center # Mid spots are 50", "\"center_longitude\": center_long, \"distance\": 130, \"limit\": 0, }, ) self.assertEquals( response.status_code,", "params\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" )", "outer_right.save() mid_top = Spot.objects.create( name=\"Mid Top\", latitude=Decimal(\" 30.0004491576\"), longitude=Decimal(\"-40.0\"), )", "the inner and mid, but # no outer spots response", "mid spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing limits -", "\"center_longitude\": -40, \"height_from_sea_level\": \"bad_data\", \"distance\": \"10\", }, ) self.assertEquals( response.status_code,", "spots: if spot[\"id\"] in spot_ids: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches", "= c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 1,", "\"Mid top was selected\") self.assertEquals( spot_ids[mid_bottom.pk], 2, \"Mid bottom was", "2 # Testing that the default limit is 20 spaces", "spot\", ) else: far_out_count += 1 self.assertEquals(far_out_count, 100, \"Found all", "= 2 # Testing the outer ring response = c.get(", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 12, }, )", "of the inner and mid, but # no outer 
spots", ") def test_invalid_longitude(self): c = Client() response = c.get( \"/api/v1/spot\",", "i in range(0, 100): far_out = Spot.objects.create( name=\"Far Out %s\"", ") self.assertEquals( response.status_code, 200, \"Accepts a query with too negative", "json.loads(response.content) self.assertEquals(len(spots), 6, \"Returns 6 spots\") spot_ids = { inner_left.pk:", "inner_left.save() inner_right = Spot.objects.create( name=\"Inner Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9998962715\"), ) inner_right.save()", "SPDX-License-Identifier: Apache-2.0 from django.test import TestCase from django.conf import settings", "was selected\" ) self.assertEquals(spot_ids[inner_top.pk], 2, \"Inner top was selected\") self.assertEquals(", "the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 4, \"Returns", "the inner 4, and any 2 of the mid response", "spots: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner spot\"", "selected\") self.assertEquals(spot_ids[mid_right.pk], 2, \"Mid rightwas selected\") self.assertEquals(spot_ids[mid_top.pk], 2, \"Mid top", "response[\"Content-Type\"], \"application/json\", \"Has the json header\" ) self.assertEquals( response.content.decode(), \"[]\",", "import patch from spotseeker_server import models @override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\") class SpotSearchDistanceTest(TestCase): def", "latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9998962715\"), ) inner_right.save() # Testing to make sure too", "2, \"Mid left was selected\") self.assertEquals(spot_ids[mid_right.pk], 2, \"Mid rightwas selected\")", "response.status_code, 200, \"Accepts the distance query\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", "longitude=Decimal(\"-39.9994813574\"), ) mid_right.save() inner_top = Spot.objects.create( name=\"Inner Top\", latitude=Decimal(\"30.0000898315\"), longitude=Decimal(\"-40.0\"),", "\"/api/v1/spot\", { 
\"center_latitude\": \"30\", \"center_longitude\": -40, \"height_from_sea_level\": \"bad_data\", \"distance\": \"10\",", "Spots are in the atlantic to make them less likely", ") self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) #", "\"[]\", \"Should return no matches\" ) def test_large_latitude(self): c =", ") self.assertEquals( response.status_code, 200, \"Accepts the distance query\" ) self.assertEquals(", "1, mid_top.pk: 1, mid_bottom.pk: 1, } for spot in spots:", "\"Spot matches a unique inner, mid or outer spot\", )", "Testing limits - should get all of the inner and", "from decimal import * from django.test.utils import override_settings from mock", "\"Should return no matches\" ) def test_large_longitude(self): c = Client()", "* from django.test.utils import override_settings from mock import patch from", "no matches\" ) def test_large_negative_longitude(self): c = Client() response =", "\"Accepts a query with no matches\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", ") outer_bottom.save() outer_left = Spot.objects.create( name=\"Outer Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0010372851\"), )", "selected\" ) self.assertEquals( spot_ids[inner_right.pk], 2, \"Inner right was selected\" )", "1, mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk: 1, outer_left.pk: 1, outer_right.pk:", ") spots = json.loads(response.content) self.assertEquals( len(spots), 112, \"Returns 112 spots", "all spots in range response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 100, \"center_longitude\":", "\"/api/v1/spot\", { \"center_latitude\": \"bad_data\", \"center_longitude\": -40, \"distance\": 10, }, )", "190, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts a query", "from django.test import TestCase from django.conf import settings from django.test.client", "test_large_latitude(self): c = 
Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 100,", "limit 0 = no limit - get all 12 spots", "\"Should return no matches\" ) def test_large_negative_latitude(self): c = Client()", "- should get all of the inner and mid, but", "bad distance\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "\"center_longitude\": center_long, \"distance\": 1, }, ) self.assertEquals( response.status_code, 200, \"Accepts", ") def test_no_params(self): c = Client() response = c.get(\"/api/v1/spot\", {})", "40, \"center_longitude\": -190, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts", "Top\", latitude=Decimal(\"30.0008983153\"), longitude=Decimal(\"-40.0\"), ) outer_top.save() outer_bottom = Spot.objects.create( name=\"Outer Bottom\",", ") spots = json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns 12 spots with", "\"Should return no matches\" ) def test_distances(self): # Spots are", "latitude=Decimal(\" 30.0004491576\"), longitude=Decimal(\"-40.0\"), ) mid_top.save() mid_bottom = Spot.objects.create( name=\"Mid Bottom\",", "and # 2 outer spots response = c.get( \"/api/v1/spot\", {", "-40.000000 # Inner spots are 10 meters away from the", "\"Returns 10 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1,", "a query with bad latitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "Testing that with a limit of 0, we pull in", "2 # Testing the outer ring response = c.get( \"/api/v1/spot\",", "Testing the mid ring response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "= json.loads(response.content) self.assertEquals(len(spots), 6, \"Returns 6 spots\") spot_ids = {", "header\" ) spots = json.loads(response.content) self.assertEquals( len(spots), 112, \"Returns 112", "\"distance\": 130, \"limit\": 0, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "\"center_latitude\": \"bad_data\", \"center_longitude\": -40, 
\"distance\": 10, }, ) self.assertEquals( response.status_code,", "matches\" ) def test_large_latitude(self): c = Client() response = c.get(", "\"limit\": 10, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance", "if spot[\"id\"] in spot_ids: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a", "else: far_out_count += 1 self.assertEquals(far_out_count, 100, \"Found all 100 far", "# Testing the mid ring response = c.get( \"/api/v1/spot\", {", ") outer_top.save() outer_bottom = Spot.objects.create( name=\"Outer Bottom\", latitude=Decimal(\"29.9991016847\"), longitude=Decimal(\"-40.0\"), )", "\"Mid bottom was selected\" ) # Testing that limit 0", "response.content.decode(), \"[]\", \"Should return no matches\" ) def test_invalid_height(self): c", "should get all inner and mid spots, and # 2", "\"distance\": \"bad_data\", }, ) self.assertEquals( response.status_code, 200, \"Accepts a query", "we pull in all spots in range response = c.get(", "{ \"center_latitude\": \"30\", \"center_longitude\": \"-40\", \"distance\": \"bad_data\", }, ) self.assertEquals(", "are 50 meters away from the center # Outer spots", "{ \"center_latitude\": \"30\", \"center_longitude\": -40, \"height_from_sea_level\": \"bad_data\", \"distance\": \"10\", },", "\"Inner bottom was selected\" ) # Testing limits - should", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 1, }, )", "12 spots with a limit of 0\") spot_ids = {", "= Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"bad_data\", \"center_longitude\":", "outer spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing that the", "100 meters away from the center # Far out spots", "# no outer spots response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "self.assertEquals(spot_ids[mid_right.pk], 2, \"Mid rightwas selected\") self.assertEquals(spot_ids[mid_top.pk], 2, \"Mid top was", "json.loads(response.content) self.assertEquals( len(spots), 
20, \"Returns 20 spots with no defined", "= json.loads(response.content) self.assertEquals(len(spots), 4, \"Returns 4 spots\") spot_ids = {", "a unique inner or mid spot\", ) spot_ids[spot[\"id\"]] = 2", "with bad distance\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "spots are 50 meters away from the center # Outer", "query with too large longitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "test_invalid_latitude(self): c = Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "= Spot.objects.create( name=\"Mid Top\", latitude=Decimal(\" 30.0004491576\"), longitude=Decimal(\"-40.0\"), ) mid_top.save() mid_bottom", "django.test import TestCase from django.conf import settings from django.test.client import", "6, \"Returns 6 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk:", "was selected\") self.assertEquals( spot_ids[inner_bottom.pk], 2, \"Inner bottom was selected\" )", "inner and mid, but # no outer spots response =", "1, } far_out_count = 0 for spot in spots: if", "test_distances(self): # Spots are in the atlantic to make them", "a query with no matches\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "a limit of 0\") spot_ids = { inner_left.pk: 1, inner_right.pk:", "latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9994813574\"), ) mid_right.save() inner_top = Spot.objects.create( name=\"Inner Top\", latitude=Decimal(\"30.0000898315\"),", "fill in the limit of 20\", ) # Testing that", "2, \"Mid rightwas selected\") self.assertEquals(spot_ids[mid_top.pk], 2, \"Mid top was selected\")", "return no matches\" ) def test_invalid_height(self): c = Client() response", "response = c.get( \"/api/v1/spot\", {\"center_latitude\": 30, \"center_longitude\": 190, \"distance\": 10},", "Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": -40,", "10}, ) self.assertEquals( response.status_code, 
200, \"Accepts a query with too", "{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 110, \"limit\": 0, },", "response.content.decode(), \"[]\", \"Should return no matches\" ) def test_invalid_distance(self): c", "latitude=Decimal(\"30.0010779783\"), longitude=Decimal(\"-40.0\"), ) far_out.save() outer_top = Spot.objects.create( name=\"Outer Top\", latitude=Decimal(\"30.0008983153\"),", "query with too large latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "outer_top.pk: 1, outer_bottom.pk: 1, } far_out_count = 0 for spot", "# Testing to make sure too small of a radius", "= Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 30, \"center_longitude\": 190,", "no matches\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "else: far_out_count += 1 self.assertEquals( far_out_count, 8, \"Found 8 far", "12, \"Returns 12 spots with a limit of 0\") spot_ids", "spaces response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long,", "header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 4, \"Returns 4 spots\")", "center_lat, \"center_longitude\": center_long, \"distance\": 130, \"limit\": 0, }, ) self.assertEquals(", "Testing the inner ring response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "the center # Mid spots are 50 meters away from", "= json.loads(response.content) self.assertEquals(len(spots), 10, \"Returns 10 spots\") spot_ids = {", "- get all 12 spots response = c.get( \"/api/v1/spot\", {", "Bottom\", latitude=Decimal(\"29.9999101685\"), longitude=Decimal(\"-40.0\"), ) inner_bottom.save() inner_left = Spot.objects.create( name=\"Inner Left\",", "spots center_lat = 30.000000 center_long = -40.000000 # Inner spots", "50 meters away from the center # Outer spots are", "Testing the outer ring response = c.get( \"/api/v1/spot\", { \"center_latitude\":", 
"\"Should return no matches\" ) def test_invalid_height(self): c = Client()", "center_long, \"distance\": 101, \"limit\": 10, }, ) self.assertEquals( response.status_code, 200,", "inner_left.pk: 1, inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk: 1, mid_left.pk: 1,", "longitude=Decimal(\"-40.0\"), ) mid_bottom.save() mid_left = Spot.objects.create( name=\"Mid Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0005186426\"),", "{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 101, \"limit\": 8, },", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 110, }, ) self.assertEquals( response.status_code,", "\"Has the json header\" ) spots = json.loads(response.content) self.assertEquals( len(spots),", "spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner, mid or outer", "no matches\" ) def test_no_params(self): c = Client() response =", "to make sure too small of a radius returns nothing", "self.assertEquals( len(spots), 20, \"Returns 20 spots with no defined limit\"", "json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 10, \"Returns 10", "112, \"Returns 112 spots with a limit of 0\" )", "outer_left = Spot.objects.create( name=\"Outer Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0010372851\"), ) outer_left.save() outer_right", "0, we pull in all spots in range response =", "rightwas selected\") self.assertEquals(spot_ids[mid_top.pk], 2, \"Mid top was selected\") self.assertEquals( spot_ids[mid_bottom.pk],", "Spot.objects.create( name=\"Outer Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0010372851\"), ) outer_left.save() outer_right = Spot.objects.create(", "matches\" ) def test_large_negative_longitude(self): c = Client() response = c.get(", "pull in all spots in range response = c.get( \"/api/v1/spot\",", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 101, \"limit\": 10, }, )", "make sure 
too small of a radius returns nothing c", "\"30\", \"center_longitude\": \"bad_data\", \"distance\": \"10\", }, ) self.assertEquals( response.status_code, 200,", "far_out_count += 1 self.assertEquals(far_out_count, 100, \"Found all 100 far out", "c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 12, },", "Spot.objects.create( name=\"Inner Bottom\", latitude=Decimal(\"29.9999101685\"), longitude=Decimal(\"-40.0\"), ) inner_bottom.save() inner_left = Spot.objects.create(", "json.loads(response.content) self.assertEquals( len(spots), 112, \"Returns 112 spots with a limit", "= 2 # testing a limit - should get the", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 101, \"limit\": 10,", "bottom was selected\" ) self.assertEquals(spot_ids[mid_left.pk], 2, \"Mid left was selected\")", "test_no_params(self): c = Client() response = c.get(\"/api/v1/spot\", {}) self.assertEquals( response.status_code,", "1, }, ) self.assertEquals( response.status_code, 200, \"Accepts a query with", "center_long = -40.000000 # Inner spots are 10 meters away", "= json.loads(response.content) self.assertEquals(len(spots), 8, \"Returns 8 spots\") spot_ids = {", "200, \"Accepts the distance query\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner or mid spot\",", "4, and any 2 of the mid response = c.get(", "was selected\" ) self.assertEquals(spot_ids[mid_left.pk], 2, \"Mid left was selected\") self.assertEquals(spot_ids[mid_right.pk],", "limit - get all 12 spots response = c.get( \"/api/v1/spot\",", "\"distance\": 150, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance", ") spot_ids[spot[\"id\"]] = 2 self.assertEquals( spot_ids[inner_left.pk], 2, \"Inner left was", "center_long, \"distance\": 1, }, ) self.assertEquals( response.status_code, 200, \"Accepts a", "longitude\", ) 
self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" )", "Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9998962715\"), ) inner_right.save() # Testing to make sure", "c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 30, \"center_longitude\":", "test_large_negative_longitude(self): c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 40,", ") inner_left.save() inner_right = Spot.objects.create( name=\"Inner Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9998962715\"), )", "Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 100, \"center_longitude\": -40, \"distance\":", "1, outer_right.pk: 1, outer_top.pk: 1, outer_bottom.pk: 1, } for spot", "response = c.get( \"/api/v1/spot\", {\"center_latitude\": 100, \"center_longitude\": -40, \"distance\": 10},", "= Spot.objects.create( name=\"Inner Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0001037285\"), ) inner_left.save() inner_right =", "= Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\":", "self.assertEquals( response.status_code, 200, \"Accepts a query with bad latitude\" )", "for spot in spots: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a", "inner spot\" ) spot_ids[spot[\"id\"]] = 2 # Testing the mid", "1 self.assertEquals( far_out_count, 8, \"Found 8 far out spots to", "import Spot import simplejson as json from decimal import *", "import override_settings from mock import patch from spotseeker_server import models", "c = Client() response = c.get(\"/api/v1/spot\", {}) self.assertEquals( response.status_code, 200,", "mid_bottom = Spot.objects.create( name=\"Mid Bottom\", latitude=Decimal(\"29.9995508424\"), longitude=Decimal(\"-40.0\"), ) mid_bottom.save() mid_left", "1, inner_bottom.pk: 1, mid_left.pk: 1, mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk:", "\"Accepts a query with bad latitude\" ) 
self.assertEquals( response[\"Content-Type\"], \"application/json\",", "center_lat, \"center_longitude\": center_long, \"distance\": 101, \"limit\": 8, }, ) self.assertEquals(", "\"[]\", \"Should return no matches\" ) def test_no_params(self): c =", "selected\" ) # Testing that limit 0 = no limit", "from mock import patch from spotseeker_server import models @override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\") class", "spot in spots: if spot[\"id\"] in spot_ids: self.assertEquals( spot_ids[spot[\"id\"]], 1,", "\"Spot matches a unique inner spot\" ) spot_ids[spot[\"id\"]] = 2", "header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 10, \"Returns 10 spots\")", "return no matches\" ) def test_invalid_longitude(self): c = Client() response", "} far_out_count = 0 for spot in spots: if spot[\"id\"]", "20\", ) # Testing that with a limit of 0,", "= 2 # Testing the mid ring response = c.get(", "defined limit\" ) spot_ids = { inner_left.pk: 1, inner_right.pk: 1,", ") inner_top.save() inner_bottom = Spot.objects.create( name=\"Inner Bottom\", latitude=Decimal(\"29.9999101685\"), longitude=Decimal(\"-40.0\"), )", "Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9994813574\"), ) mid_right.save() inner_top = Spot.objects.create( name=\"Inner Top\",", ") self.assertEquals( response.status_code, 200, \"Accepts a query with too large", "outer_bottom.pk: 1, } far_out_count = 0 for spot in spots:", "\"center_longitude\": center_long, \"distance\": 60, \"limit\": 6, }, ) self.assertEquals( response.status_code,", "range response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long,", "@override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\") class SpotSearchDistanceTest(TestCase): def test_invalid_latitude(self): c = Client() response =", "c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 1, },", "with no 
defined limit\" ) spot_ids = { inner_left.pk: 1,", "{ \"center_latitude\": \"30\", \"center_longitude\": \"bad_data\", \"distance\": \"10\", }, ) self.assertEquals(", "\"Inner left was selected\" ) self.assertEquals( spot_ids[inner_right.pk], 2, \"Inner right", "2, \"Mid bottom was selected\" ) # Testing that limit", "\"distance\": \"10\", }, ) self.assertEquals( response.status_code, 200, \"Accepts a query", "110, \"limit\": 0, }, ) self.assertEquals( response.status_code, 200, \"Accepts the", "def test_distances(self): # Spots are in the atlantic to make", "UW-IT, University of Washington # SPDX-License-Identifier: Apache-2.0 from django.test import", ") self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def", "json header\" ) spots = json.loads(response.content) self.assertEquals( len(spots), 20, \"Returns", "response.content.decode(), \"[]\", \"Should return no matches\" ) # Testing the", "with no params\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "matches\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" )", "spot_ids[mid_bottom.pk], 2, \"Mid bottom was selected\" ) # Testing that", "get all inner and mid spots, and # 2 outer", "large longitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "no matches\" ) def test_large_negative_latitude(self): c = Client() response =", "= c.get( \"/api/v1/spot\", {\"center_latitude\": 100, \"center_longitude\": -40, \"distance\": 10}, )", "query with bad latitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "the outside in, so things that sort by # primary", "the center # Outer spots are 100 meters away from", "Spot import simplejson as json from decimal import * from", "things that sort by # primary key will give bad", "from django.conf import settings from django.test.client import Client from 
spotseeker_server.models", "{\"center_latitude\": 30, \"center_longitude\": 190, \"distance\": 10}, ) self.assertEquals( response.status_code, 200,", "name=\"Inner Top\", latitude=Decimal(\"30.0000898315\"), longitude=Decimal(\"-40.0\"), ) inner_top.save() inner_bottom = Spot.objects.create( name=\"Inner", "selected\" ) # Testing limits - should get all of", "\"[]\", \"Should return no matches\" ) def test_invalid_height(self): c =", "\"/api/v1/spot\", {\"center_latitude\": 100, \"center_longitude\": -40, \"distance\": 10}, ) self.assertEquals( response.status_code,", "\"distance\": 101, \"limit\": 10, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "latitude=Decimal(\"29.9991016847\"), longitude=Decimal(\"-40.0\"), ) outer_bottom.save() outer_left = Spot.objects.create( name=\"Outer Left\", latitude=Decimal(\"30.0\"),", "latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0005186426\"), ) mid_left.save() mid_right = Spot.objects.create( name=\"Mid Right\", latitude=Decimal(\"30.0\"),", "# SPDX-License-Identifier: Apache-2.0 from django.test import TestCase from django.conf import", "0\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk: 1,", "or mid spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing the", "make them less likely to collide # with actual spots", "# 2 outer spots response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "2 of the mid response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "spots: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner, mid", "Inner spots are 10 meters away from the center #", "c.get( \"/api/v1/spot\", {\"center_latitude\": 30, \"center_longitude\": 190, \"distance\": 10}, ) self.assertEquals(", "limit - should get the inner 4, and any 2", "with a limit of 0\" ) spot_ids = { inner_left.pk:", "\"Should return no matches\" ) def test_invalid_distance(self): c = Client()", "selected\") self.assertEquals( spot_ids[inner_bottom.pk], 2, \"Inner bottom was selected\" ) #", 
"{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 12, }, ) self.assertEquals(", "200, \"Accepts a query with bad height\" ) self.assertEquals( response[\"Content-Type\"],", "}, ) self.assertEquals( response.status_code, 200, \"Accepts the distance query\" )", "center_long, \"distance\": 110, }, ) self.assertEquals( response.status_code, 200, \"Accepts the", "# Creating these from the outside in, so things that", "{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 101, \"limit\": 10, },", "= Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 40, \"center_longitude\": -190,", "no defined limit\" ) spot_ids = { inner_left.pk: 1, inner_right.pk:", "\"distance\": 110, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance", "with actual spots center_lat = 30.000000 center_long = -40.000000 #", "center_long, \"distance\": 150, }, ) self.assertEquals( response.status_code, 200, \"Accepts the", "self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner or mid", "class SpotSearchDistanceTest(TestCase): def test_invalid_latitude(self): c = Client() response = c.get(", "spots: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches a unique inner or", "\"center_longitude\": -40, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts a", "Testing to make sure too small of a radius returns", "Spot.objects.create( name=\"Mid Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0005186426\"), ) mid_left.save() mid_right = Spot.objects.create(", "spotseeker_server.models import Spot import simplejson as json from decimal import", "= c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 12,", "response.status_code, 200, \"Accepts a query with bad latitude\" ) self.assertEquals(", "# Testing the inner ring response = c.get( \"/api/v1/spot\", {", "c = Client() response = c.get( 
\"/api/v1/spot\", {\"center_latitude\": -100, \"center_longitude\":", "outer spot\", ) spot_ids[spot[\"id\"]] = 2 # testing a limit", "\"limit\": 6, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance", "10 meters away from the center # Mid spots are", "in range(0, 100): far_out = Spot.objects.create( name=\"Far Out %s\" %", "matches\" ) def test_invalid_distance(self): c = Client() response = c.get(", "inner_right = Spot.objects.create( name=\"Inner Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9998962715\"), ) inner_right.save() #", "\"Inner bottom was selected\" ) self.assertEquals(spot_ids[mid_left.pk], 2, \"Mid left was", "1, outer_bottom.pk: 1, } far_out_count = 0 for spot in", "less likely to collide # with actual spots center_lat =", "at the north # Creating these from the outside in,", "no matches\" ) # Testing the inner ring response =", "outer_right.pk: 1, outer_top.pk: 1, outer_bottom.pk: 1, } for spot in", "response.content.decode(), \"[]\", \"Should return no matches\" ) def test_large_negative_longitude(self): c", "return no matches\" ) def test_invalid_distance(self): c = Client() response", "center_lat, \"center_longitude\": center_long, \"distance\": 60, \"limit\": 6, }, ) self.assertEquals(", ") def test_distances(self): # Spots are in the atlantic to", "inner_top = Spot.objects.create( name=\"Inner Top\", latitude=Decimal(\"30.0000898315\"), longitude=Decimal(\"-40.0\"), ) inner_top.save() inner_bottom", "Bottom\", latitude=Decimal(\"29.9995508424\"), longitude=Decimal(\"-40.0\"), ) mid_bottom.save() mid_left = Spot.objects.create( name=\"Mid Left\",", "left was selected\" ) self.assertEquals( spot_ids[inner_right.pk], 2, \"Inner right was", "-40, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts a query", "the json header\" ) spots = json.loads(response.content) self.assertEquals( len(spots), 112,", "self.assertEquals(spot_ids[mid_left.pk], 2, \"Mid left was selected\") 
self.assertEquals(spot_ids[mid_right.pk], 2, \"Mid rightwas", "\"bad_data\", }, ) self.assertEquals( response.status_code, 200, \"Accepts a query with", "sure too small of a radius returns nothing c =", "{ inner_left.pk: 1, inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk: 1, mid_left.pk:", "200, \"Accepts a query with no matches\" ) self.assertEquals( response[\"Content-Type\"],", "response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"bad_data\", \"center_longitude\": -40, \"distance\":", "matches a unique inner spot\" ) spot_ids[spot[\"id\"]] = 2 #", "= c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 130,", "= Spot.objects.create( name=\"Mid Bottom\", latitude=Decimal(\"29.9995508424\"), longitude=Decimal(\"-40.0\"), ) mid_bottom.save() mid_left =", "outer ring response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\":", "name=\"Outer Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9989627149\"), ) outer_right.save() mid_top = Spot.objects.create( name=\"Mid", "inner_top.pk: 1, inner_bottom.pk: 1, } for spot in spots: self.assertEquals(", "simplejson as json from decimal import * from django.test.utils import", "the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 6, \"Returns", "1, inner_bottom.pk: 1, } for spot in spots: self.assertEquals( spot_ids[spot[\"id\"]],", "center_lat, \"center_longitude\": center_long, \"distance\": 110, }, ) self.assertEquals( response.status_code, 200,", "self.assertEquals(len(spots), 6, \"Returns 6 spots\") spot_ids = { inner_left.pk: 1,", "latitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" )", "\"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": \"bad_data\", \"distance\": \"10\", }, )", "radius returns nothing c = Client() response = c.get( \"/api/v1/spot\",", "with too large latitude\", ) self.assertEquals( 
response[\"Content-Type\"], \"application/json\", \"Has the", "c.get( \"/api/v1/spot\", {\"center_latitude\": -100, \"center_longitude\": -40, \"distance\": 10}, ) self.assertEquals(", "unique inner or mid spot\", ) spot_ids[spot[\"id\"]] = 2 #", "Spot.objects.create( name=\"Mid Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9994813574\"), ) mid_right.save() inner_top = Spot.objects.create(", "the default limit is 20 spaces response = c.get( \"/api/v1/spot\",", "\"distance\": 101, \"limit\": 8, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "with a limit of 0\") spot_ids = { inner_left.pk: 1,", "\"center_longitude\": center_long, \"distance\": 150, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 10, \"Returns", ") mid_right.save() inner_top = Spot.objects.create( name=\"Inner Top\", latitude=Decimal(\"30.0000898315\"), longitude=Decimal(\"-40.0\"), )", "in range response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\":", "\"Accepts a query with no params\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", "import * from django.test.utils import override_settings from mock import patch", "\"/api/v1/spot\", {\"center_latitude\": -100, \"center_longitude\": -40, \"distance\": 10}, ) self.assertEquals( response.status_code,", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 60, }, ) self.assertEquals( response.status_code,", "header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns 12 spots", "from the center # Mid spots are 50 meters away", "mid or outer spot\", ) else: far_out_count += 1 self.assertEquals(", "\"limit\": 8, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance", "response.status_code, 200, \"Accepts a query with no matches\" ) self.assertEquals(", "Client() response = c.get( 
\"/api/v1/spot\", {\"center_latitude\": -100, \"center_longitude\": -40, \"distance\":", "the center # Far out spots are 120 meters away,", "import settings from django.test.client import Client from spotseeker_server.models import Spot", "json from decimal import * from django.test.utils import override_settings from", "c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 40, \"center_longitude\":", "100): far_out = Spot.objects.create( name=\"Far Out %s\" % i, latitude=Decimal(\"30.0010779783\"),", "outer_top = Spot.objects.create( name=\"Outer Top\", latitude=Decimal(\"30.0008983153\"), longitude=Decimal(\"-40.0\"), ) outer_top.save() outer_bottom", "name=\"Mid Top\", latitude=Decimal(\" 30.0004491576\"), longitude=Decimal(\"-40.0\"), ) mid_top.save() mid_bottom = Spot.objects.create(", "mid_top.pk: 1, mid_bottom.pk: 1, } for spot in spots: self.assertEquals(", "= Spot.objects.create( name=\"Mid Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0005186426\"), ) mid_left.save() mid_right =", "1, mid_bottom.pk: 1, } for spot in spots: self.assertEquals( spot_ids[spot[\"id\"]],", "c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": -40, \"height_from_sea_level\": \"bad_data\", \"distance\":", "-190, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts a query", "the json header\" ) spots = json.loads(response.content) self.assertEquals( len(spots), 20,", "limits - should get all inner and mid spots, and", "8, \"Returns 8 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk:", "12, \"Returns 12 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk:", "Spot.objects.create( name=\"Mid Top\", latitude=Decimal(\" 30.0004491576\"), longitude=Decimal(\"-40.0\"), ) mid_top.save() mid_bottom =", "matches\" ) def test_invalid_height(self): c = Client() response = c.get(", "matches\" ) def test_large_negative_latitude(self): c = Client() response = c.get(", "{}) self.assertEquals( 
response.status_code, 200, \"Accepts a query with no params\"", "10, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance query\"", ") self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" ) self.assertEquals(", "matches\" ) def test_distances(self): # Spots are in the atlantic", "return no matches\" ) # Testing the inner ring response", "be # sorted by distance for i in range(0, 100):", "response.status_code, 200, \"Accepts a query with bad longitude\" ) self.assertEquals(", ") self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\" ) spots", "a unique inner, mid or outer spot\", ) spot_ids[spot[\"id\"]] =", "= c.get( \"/api/v1/spot\", {\"center_latitude\": -100, \"center_longitude\": -40, \"distance\": 10}, )", "query with bad longitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "200, \"Accepts a query with too large latitude\", ) self.assertEquals(", "\"Spot matches a unique inner or mid spot\", ) spot_ids[spot[\"id\"]]", "spots = json.loads(response.content) self.assertEquals(len(spots), 6, \"Returns 6 spots\") spot_ids =", "# Testing limits - should get all of the inner", "center_long, \"distance\": 60, \"limit\": 6, }, ) self.assertEquals( response.status_code, 200,", "spot_ids[spot[\"id\"]] = 2 # Testing the outer ring response =", "inner or mid spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing", "center_lat, \"center_longitude\": center_long, \"distance\": 110, \"limit\": 0, }, ) self.assertEquals(", "0, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance query\"", "are 100 meters away from the center # Far out", ") else: far_out_count += 1 self.assertEquals(far_out_count, 100, \"Found all 100", "# Testing the outer ring response = c.get( \"/api/v1/spot\", {", "self.assertEquals(spot_ids[inner_top.pk], 2, \"Inner top was selected\") self.assertEquals( spot_ids[inner_bottom.pk], 2, \"Inner", "# Testing 
that the default limit is 20 spaces response", "def test_invalid_height(self): c = Client() response = c.get( \"/api/v1/spot\", {", "outer spot\", ) spot_ids[spot[\"id\"]] = 2 self.assertEquals( spot_ids[inner_left.pk], 2, \"Inner", "# sorted by distance for i in range(0, 100): far_out", "0 = no limit - get all 12 spots response", "spot_ids[spot[\"id\"]] = 2 # Testing that the default limit is", "is 20 spaces response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat,", "outer_left.pk: 1, outer_right.pk: 1, outer_top.pk: 1, outer_bottom.pk: 1, } far_out_count", "+= 1 self.assertEquals(far_out_count, 100, \"Found all 100 far out spots\")", "inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk: 1, mid_left.pk: 1, mid_right.pk: 1,", "spot_ids[spot[\"id\"]] = 2 self.assertEquals( spot_ids[inner_left.pk], 2, \"Inner left was selected\"", "of 0\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk:", "= json.loads(response.content) self.assertEquals( len(spots), 112, \"Returns 112 spots with a", ") spot_ids[spot[\"id\"]] = 2 # Testing that the default limit", ") spot_ids[spot[\"id\"]] = 2 # Testing the outer ring response", "response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\":", "and mid spots, and # 2 outer spots response =", "# Inner spots are 10 meters away from the center", "Apache-2.0 from django.test import TestCase from django.conf import settings from", "unique inner, mid or outer spot\", ) else: far_out_count +=", "from spotseeker_server import models @override_settings(SPOTSEEKER_AUTH_MODULE=\"spotseeker_server.auth.all_ok\") class SpotSearchDistanceTest(TestCase): def test_invalid_latitude(self): c", "json header\" ) self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\"", "with bad longitude\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "\"center_longitude\": \"-40\", \"distance\": \"bad_data\", }, ) 
self.assertEquals( response.status_code, 200, \"Accepts", "# Spots are in the atlantic to make them less", "large latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "name=\"Far Out %s\" % i, latitude=Decimal(\"30.0010779783\"), longitude=Decimal(\"-40.0\"), ) far_out.save() outer_top", "longitude=Decimal(\"-40.0005186426\"), ) mid_left.save() mid_right = Spot.objects.create( name=\"Mid Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9994813574\"),", "and any 2 of the mid response = c.get( \"/api/v1/spot\",", "test_invalid_longitude(self): c = Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "that limit 0 = no limit - get all 12", "no matches\" ) def test_large_longitude(self): c = Client() response =", "limit\" ) spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk:", "= json.loads(response.content) self.assertEquals( len(spots), 20, \"Returns 20 spots with no", "response.content.decode(), \"[]\", \"Should return no matches\" ) def test_no_params(self): c", "likely to collide # with actual spots center_lat = 30.000000", "= json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns 12 spots\") spot_ids = {", "was selected\" ) # Testing limits - should get all", "the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns", "outer_bottom = Spot.objects.create( name=\"Outer Bottom\", latitude=Decimal(\"29.9991016847\"), longitude=Decimal(\"-40.0\"), ) outer_bottom.save() outer_left", "the mid ring response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat,", "a limit of 0, we pull in all spots in", "or outer spot\", ) spot_ids[spot[\"id\"]] = 2 # testing a", "spots, and # 2 outer spots response = c.get( \"/api/v1/spot\",", "spot_ids[inner_left.pk], 2, \"Inner left was selected\" ) self.assertEquals( spot_ids[inner_right.pk], 2,", "c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", 
\"center_longitude\": \"bad_data\", \"distance\": \"10\", },", "for things that should be # sorted by distance for", "a query with too negative latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", "center_long, \"distance\": 110, \"limit\": 0, }, ) self.assertEquals( response.status_code, 200,", "= -40.000000 # Inner spots are 10 meters away from", "12 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk:", "with too negative latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "return no matches\" ) def test_no_params(self): c = Client() response", "outer spot\", ) else: far_out_count += 1 self.assertEquals( far_out_count, 8,", "60, \"limit\": 6, }, ) self.assertEquals( response.status_code, 200, \"Accepts the", "Far out spots are 120 meters away, at the north", "= Spot.objects.create( name=\"Inner Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9998962715\"), ) inner_right.save() # Testing", "\"limit\": 0, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance", "-40, \"distance\": 10, }, ) self.assertEquals( response.status_code, 200, \"Accepts a", "the json header\" ) self.assertEquals( response.content.decode(), \"[]\", \"Should return no", "from the center # Outer spots are 100 meters away", "1, } for spot in spots: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot", "Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 40, \"center_longitude\": -190, \"distance\":", "are 120 meters away, at the north # Creating these", "Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0001037285\"), ) inner_left.save() inner_right = Spot.objects.create( name=\"Inner Right\",", "negative longitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "\"Has the json header\" ) spots = json.loads(response.content) self.assertEquals(len(spots), 6,", "\"Inner right was selected\" ) 
self.assertEquals(spot_ids[inner_top.pk], 2, \"Inner top was", "Out %s\" % i, latitude=Decimal(\"30.0010779783\"), longitude=Decimal(\"-40.0\"), ) far_out.save() outer_top =", "Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 30, \"center_longitude\": 190, \"distance\":", "} for spot in spots: self.assertEquals( spot_ids[spot[\"id\"]], 1, \"Spot matches", "1, mid_top.pk: 1, mid_bottom.pk: 1, outer_left.pk: 1, outer_right.pk: 1, outer_top.pk:", "\"center_longitude\": 190, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts a", "the inner ring response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat,", "name=\"Inner Left\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-40.0001037285\"), ) inner_left.save() inner_right = Spot.objects.create( name=\"Inner", "100, \"center_longitude\": -40, \"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts", "a query with too large latitude\", ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", "= Spot.objects.create( name=\"Outer Bottom\", latitude=Decimal(\"29.9991016847\"), longitude=Decimal(\"-40.0\"), ) outer_bottom.save() outer_left =", "them less likely to collide # with actual spots center_lat", "2 # Testing the mid ring response = c.get( \"/api/v1/spot\",", "selected\" ) self.assertEquals(spot_ids[mid_left.pk], 2, \"Mid left was selected\") self.assertEquals(spot_ids[mid_right.pk], 2,", "limit is 20 spaces response = c.get( \"/api/v1/spot\", { \"center_latitude\":", "\"center_latitude\": \"30\", \"center_longitude\": \"-40\", \"distance\": \"bad_data\", }, ) self.assertEquals( response.status_code,", "200, \"Accepts a query with too negative latitude\", ) self.assertEquals(", "far_out.save() outer_top = Spot.objects.create( name=\"Outer Top\", latitude=Decimal(\"30.0008983153\"), longitude=Decimal(\"-40.0\"), ) outer_top.save()", "len(spots), 112, \"Returns 112 spots with a limit of 0\"", "longitude=Decimal(\"-40.0\"), ) 
inner_top.save() inner_bottom = Spot.objects.create( name=\"Inner Bottom\", latitude=Decimal(\"29.9999101685\"), longitude=Decimal(\"-40.0\"),", "\"Should return no matches\" ) # Testing the inner ring", "distance query\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json header\"", "120 meters away, at the north # Creating these from", "4, \"Returns 4 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk:", "spots are 10 meters away from the center # Mid", "Spot.objects.create( name=\"Outer Bottom\", latitude=Decimal(\"29.9991016847\"), longitude=Decimal(\"-40.0\"), ) outer_bottom.save() outer_left = Spot.objects.create(", "collide # with actual spots center_lat = 30.000000 center_long =", "no matches\" ) def test_invalid_height(self): c = Client() response =", "= Client() response = c.get(\"/api/v1/spot\", {}) self.assertEquals( response.status_code, 200, \"Accepts", "outer_right = Spot.objects.create( name=\"Outer Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9989627149\"), ) outer_right.save() mid_top", "\"Accepts a query with bad height\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 150, }, ) self.assertEquals( response.status_code,", "response.status_code, 200, \"Accepts a query with bad distance\" ) self.assertEquals(", "top was selected\") self.assertEquals( spot_ids[mid_bottom.pk], 2, \"Mid bottom was selected\"", "Testing that the default limit is 20 spaces response =", ") def test_invalid_height(self): c = Client() response = c.get( \"/api/v1/spot\",", "longitude=Decimal(\"-40.0001037285\"), ) inner_left.save() inner_right = Spot.objects.create( name=\"Inner Right\", latitude=Decimal(\"30.0\"), longitude=Decimal(\"-39.9998962715\"),", "def test_invalid_latitude(self): c = Client() response = c.get( \"/api/v1/spot\", {", "\"Returns 112 spots with a limit of 0\" ) spot_ids", "a query with bad 
height\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has", "but # no outer spots response = c.get( \"/api/v1/spot\", {", "with bad height\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "name=\"Outer Bottom\", latitude=Decimal(\"29.9991016847\"), longitude=Decimal(\"-40.0\"), ) outer_bottom.save() outer_left = Spot.objects.create( name=\"Outer", "mid or outer spot\", ) spot_ids[spot[\"id\"]] = 2 self.assertEquals( spot_ids[inner_left.pk],", "\"Returns 20 spots with no defined limit\" ) spot_ids =", "these from the outside in, so things that sort by", "from django.test.client import Client from spotseeker_server.models import Spot import simplejson", "return no matches\" ) def test_large_negative_longitude(self): c = Client() response", "with no matches\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the json", "primary key will give bad results for things that should", "= c.get( \"/api/v1/spot\", {\"center_latitude\": 40, \"center_longitude\": -190, \"distance\": 10}, )", "\"[]\", \"Should return no matches\" ) def test_large_longitude(self): c =", "Spot.objects.create( name=\"Mid Bottom\", latitude=Decimal(\"29.9995508424\"), longitude=Decimal(\"-40.0\"), ) mid_bottom.save() mid_left = Spot.objects.create(", "from django.test.utils import override_settings from mock import patch from spotseeker_server", "1, inner_right.pk: 1, inner_top.pk: 1, inner_bottom.pk: 1, } for spot", "so things that sort by # primary key will give", "2 self.assertEquals( spot_ids[inner_left.pk], 2, \"Inner left was selected\" ) self.assertEquals(", "range(0, 100): far_out = Spot.objects.create( name=\"Far Out %s\" % i,", "of a radius returns nothing c = Client() response =", "<reponame>uw-it-aca/spotseeker_server # Copyright 2021 UW-IT, University of Washington # SPDX-License-Identifier:", "json header\" ) spots = json.loads(response.content) self.assertEquals( len(spots), 112, \"Returns", 
"self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def test_invalid_longitude(self):", "\"center_latitude\": \"30\", \"center_longitude\": -40, \"height_from_sea_level\": \"bad_data\", \"distance\": \"10\", }, )", "decimal import * from django.test.utils import override_settings from mock import", "as json from decimal import * from django.test.utils import override_settings", "\"Accepts a query with bad distance\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\",", "name=\"Inner Bottom\", latitude=Decimal(\"29.9999101685\"), longitude=Decimal(\"-40.0\"), ) inner_bottom.save() inner_left = Spot.objects.create( name=\"Inner", "inner_bottom.pk: 1, mid_left.pk: 1, mid_right.pk: 1, mid_top.pk: 1, mid_bottom.pk: 1,", "things that should be # sorted by distance for i", "= 2 # Testing limits - should get all inner", "return no matches\" ) def test_large_longitude(self): c = Client() response", "spots = json.loads(response.content) self.assertEquals( len(spots), 20, \"Returns 20 spots with", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 130, \"limit\": 0, }, )", "sort by # primary key will give bad results for", "in the atlantic to make them less likely to collide", "response.content.decode(), \"[]\", \"Should return no matches\" ) def test_large_negative_latitude(self): c", "get all of the inner and mid, but # no", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 130, \"limit\": 0,", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 101, \"limit\": 8,", "= c.get( \"/api/v1/spot\", { \"center_latitude\": \"bad_data\", \"center_longitude\": -40, \"distance\": 10,", "center # Mid spots are 50 meters away from the", "\"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 101, \"limit\": 8, }, )", "selected\") self.assertEquals( 
spot_ids[inner_bottom.pk], 2, \"Inner bottom was selected\" ) self.assertEquals(spot_ids[mid_left.pk],", "8 far out spots to fill in the limit of", "= c.get( \"/api/v1/spot\", {\"center_latitude\": 30, \"center_longitude\": 190, \"distance\": 10}, )", "\"[]\", \"Should return no matches\" ) def test_distances(self): # Spots", "# Copyright 2021 UW-IT, University of Washington # SPDX-License-Identifier: Apache-2.0", ") far_out.save() outer_top = Spot.objects.create( name=\"Outer Top\", latitude=Decimal(\"30.0008983153\"), longitude=Decimal(\"-40.0\"), )", "longitude=Decimal(\"-40.0\"), ) far_out.save() outer_top = Spot.objects.create( name=\"Outer Top\", latitude=Decimal(\"30.0008983153\"), longitude=Decimal(\"-40.0\"),", "query with no matches\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "1, mid_bottom.pk: 1, outer_left.pk: 1, outer_right.pk: 1, outer_top.pk: 1, outer_bottom.pk:", "bottom was selected\" ) # Testing limits - should get", "outer_top.save() outer_bottom = Spot.objects.create( name=\"Outer Bottom\", latitude=Decimal(\"29.9991016847\"), longitude=Decimal(\"-40.0\"), ) outer_bottom.save()", "Creating these from the outside in, so things that sort", "= c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\", \"center_longitude\": -40, \"height_from_sea_level\": \"bad_data\",", "should get all of the inner and mid, but #", "response.content.decode(), \"[]\", \"Should return no matches\" ) def test_large_longitude(self): c", "\"Accepts a query with too large longitude\", ) self.assertEquals( response[\"Content-Type\"],", "{ \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 110, }, ) self.assertEquals(", "mid or outer spot\", ) spot_ids[spot[\"id\"]] = 2 # Testing", "away from the center # Outer spots are 100 meters", "\"Has the json header\" ) self.assertEquals( response.content.decode(), \"[]\", \"Should return", "= Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": 100, 
\"center_longitude\": -40,", "\"[]\", \"Should return no matches\" ) def test_large_negative_latitude(self): c =", "i, latitude=Decimal(\"30.0010779783\"), longitude=Decimal(\"-40.0\"), ) far_out.save() outer_top = Spot.objects.create( name=\"Outer Top\",", "query with no params\" ) self.assertEquals( response[\"Content-Type\"], \"application/json\", \"Has the", "1, inner_top.pk: 1, inner_bottom.pk: 1, mid_left.pk: 1, mid_right.pk: 1, mid_top.pk:", "of 0, we pull in all spots in range response", "Copyright 2021 UW-IT, University of Washington # SPDX-License-Identifier: Apache-2.0 from", "200, \"Accepts a query with too negative longitude\", ) self.assertEquals(", "response[\"Content-Type\"], \"application/json\", \"Has the json header\" ) spots = json.loads(response.content)", "8, \"Found 8 far out spots to fill in the", "c = Client() response = c.get( \"/api/v1/spot\", { \"center_latitude\": \"30\",", "no matches\" ) def test_large_latitude(self): c = Client() response =", ") def test_large_negative_latitude(self): c = Client() response = c.get( \"/api/v1/spot\",", "test_large_negative_latitude(self): c = Client() response = c.get( \"/api/v1/spot\", {\"center_latitude\": -100,", "import Client from spotseeker_server.models import Spot import simplejson as json", "self.assertEquals( response.content.decode(), \"[]\", \"Should return no matches\" ) def test_invalid_height(self):", ") spots = json.loads(response.content) self.assertEquals(len(spots), 12, \"Returns 12 spots\") spot_ids", "spots response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long,", "mid_bottom.pk: 1, outer_left.pk: 1, outer_right.pk: 1, outer_top.pk: 1, outer_bottom.pk: 1,", "a limit - should get the inner 4, and any", "- should get all inner and mid spots, and #", "\"center_longitude\": center_long, \"distance\": 60, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "django.test.utils import override_settings from mock import patch 
from spotseeker_server import", "right was selected\" ) self.assertEquals(spot_ids[inner_top.pk], 2, \"Inner top was selected\")", "0\" ) spot_ids = { inner_left.pk: 1, inner_right.pk: 1, inner_top.pk:", ") spots = json.loads(response.content) self.assertEquals(len(spots), 4, \"Returns 4 spots\") spot_ids", "\"[]\", \"Should return no matches\" ) def test_invalid_distance(self): c =", "200, \"Accepts a query with bad latitude\" ) self.assertEquals( response[\"Content-Type\"],", "c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 101, \"limit\":", "latitude=Decimal(\"30.0000898315\"), longitude=Decimal(\"-40.0\"), ) inner_top.save() inner_bottom = Spot.objects.create( name=\"Inner Bottom\", latitude=Decimal(\"29.9999101685\"),", ") inner_right.save() # Testing to make sure too small of", "should get the inner 4, and any 2 of the", "mid response = c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long,", "# Far out spots are 120 meters away, at the", "selected\" ) self.assertEquals(spot_ids[inner_top.pk], 2, \"Inner top was selected\") self.assertEquals( spot_ids[inner_bottom.pk],", "matches a unique inner or mid spot\", ) spot_ids[spot[\"id\"]] =", "2021 UW-IT, University of Washington # SPDX-License-Identifier: Apache-2.0 from django.test", "\"center_longitude\": -40, \"distance\": 10, }, ) self.assertEquals( response.status_code, 200, \"Accepts", "\"distance\": 10}, ) self.assertEquals( response.status_code, 200, \"Accepts a query with", "\"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 150, }, )", "\"application/json\", \"Has the json header\" ) spots = json.loads(response.content) self.assertEquals(", "response.status_code, 200, \"Accepts a query with too large latitude\", )", "sorted by distance for i in range(0, 100): far_out =", "# testing a limit - should get the inner 4,", "json header\" ) spots = 
json.loads(response.content) self.assertEquals(len(spots), 6, \"Returns 6", "\"Returns 6 spots\") spot_ids = { inner_left.pk: 1, inner_right.pk: 1,", "all of the inner and mid, but # no outer", "c.get( \"/api/v1/spot\", { \"center_latitude\": center_lat, \"center_longitude\": center_long, \"distance\": 110, },", "inner_right.save() # Testing to make sure too small of a", "\"distance\": 60, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance", "\"distance\": 12, }, ) self.assertEquals( response.status_code, 200, \"Accepts the distance", "self.assertEquals( spot_ids[mid_bottom.pk], 2, \"Mid bottom was selected\" ) # Testing", "matches\" ) # Testing the inner ring response = c.get(", ") def test_large_longitude(self): c = Client() response = c.get( \"/api/v1/spot\"," ]
[ "verify=False) return str('Bearer ' + r.json()['access_token']) def import_variables_from_file(): my_variables_file=open('variables.yml', 'r')", "headers=headers, verify=False) # type(r.json()) # pprint(r.json()) # This gives the", "my_variables_in_yaml=import_variables_from_file() authuser = my_variables_in_yaml['northstar']['username'] authpwd = my_variables_in_yaml['northstar']['password'] url_base = 'http://'", "pprint(r.json()) # This gives the names of all the LSPs", "url_base + '1/topology/1/te-lsps' headers = { 'Accept': 'application/json' } headers", "= 'https://' + my_variables_in_yaml['northstar']['ip'] + ':8443/oauth2/token' data_to_get_token = {\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>} r", "headers = { 'Accept': 'application/json' } headers = { 'Content-type':", "import requests from requests.auth import HTTPBasicAuth from pprint import pprint", "'application/json' } # r = requests.get(url, headers=headers, auth=(authuser, authpwd)) get_token()", "requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) def get_token(): url = 'https://' +", "return my_variables_in_yaml my_variables_in_yaml=import_variables_from_file() authuser = my_variables_in_yaml['northstar']['username'] authpwd = my_variables_in_yaml['northstar']['password'] url_base", "r = requests.get(url, headers=headers, auth=(authuser, authpwd)) get_token() headers = {'Authorization':get_token(),", "This gives the names of all the LSPs that are", "= my_variables_in_yaml['northstar']['username'] authpwd = my_variables_in_yaml['northstar']['password'] url_base = 'http://' + my_variables_in_yaml['northstar']['ip']", "= {'Authorization':get_token(), 'Accept' : 'application/json', 'Content-Type' : 'application/json'} r =", "names of all the LSPs that are active for item", "get_token(): url = 'https://' + my_variables_in_yaml['northstar']['ip'] + ':8443/oauth2/token' data_to_get_token =", 
"get_active_LSPs.py import json import requests from requests.auth import HTTPBasicAuth from", "'https://' + my_variables_in_yaml['northstar']['ip'] + ':8443/oauth2/token' data_to_get_token = {\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>} r =", "my_variables_in_yaml=yaml.load(my_variables_in_string) my_variables_file.close() return my_variables_in_yaml my_variables_in_yaml=import_variables_from_file() authuser = my_variables_in_yaml['northstar']['username'] authpwd =", "headers = { 'Content-type': 'application/json' } # r = requests.get(url,", "= { 'Accept': 'application/json' } headers = { 'Content-type': 'application/json'", "my_variables_in_yaml['northstar']['ip'] + ':8091/NorthStar/API/v2/tenant/' url = url_base + '1/topology/1/te-lsps' headers =", "r.json()['access_token']) def import_variables_from_file(): my_variables_file=open('variables.yml', 'r') my_variables_in_string=my_variables_file.read() my_variables_in_yaml=yaml.load(my_variables_in_string) my_variables_file.close() return my_variables_in_yaml", "auth=(authuser, authpwd), headers=headers, verify=False) return str('Bearer ' + r.json()['access_token']) def", "headers=headers, auth=(authuser, authpwd)) get_token() headers = {'Authorization':get_token(), 'Accept' : 'application/json',", "# This gives the names of all the LSPs that", "+ ':8443/oauth2/token' data_to_get_token = {\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>} r = requests.post(url, data=json.dumps(data_to_get_token), auth=(authuser,", "= my_variables_in_yaml['northstar']['password'] url_base = 'http://' + my_variables_in_yaml['northstar']['ip'] + ':8091/NorthStar/API/v2/tenant/' url", "str('Bearer ' + r.json()['access_token']) def import_variables_from_file(): my_variables_file=open('variables.yml', 'r') my_variables_in_string=my_variables_file.read() my_variables_in_yaml=yaml.load(my_variables_in_string)", "# this python script makes a rest call to Juniper", 
"my_variables_in_string=my_variables_file.read() my_variables_in_yaml=yaml.load(my_variables_in_string) my_variables_file.close() return my_variables_in_yaml my_variables_in_yaml=import_variables_from_file() authuser = my_variables_in_yaml['northstar']['username'] authpwd", "in r.json(): if item['operationalStatus'] == 'Active': print \"This LSP is", "auth=(authuser, authpwd)) get_token() headers = {'Authorization':get_token(), 'Accept' : 'application/json', 'Content-Type'", "import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) def get_token(): url = 'https://' + my_variables_in_yaml['northstar']['ip']", "get active LSPs # usage: python get_active_LSPs.py import json import", "pprint import pprint import yaml from requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning)", "usage: python get_active_LSPs.py import json import requests from requests.auth import", "headers=headers, verify=False) return str('Bearer ' + r.json()['access_token']) def import_variables_from_file(): my_variables_file=open('variables.yml',", "script makes a rest call to Juniper Northstar to get", "== 'Active': print \"This LSP is active: \" + item['name']", "InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) def get_token(): url = 'https://' + my_variables_in_yaml['northstar']['ip'] +", "url = 'https://' + my_variables_in_yaml['northstar']['ip'] + ':8443/oauth2/token' data_to_get_token = {\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>}", "# r = requests.get(url, headers=headers, auth=(authuser, authpwd)) get_token() headers =", "r.json(): if item['operationalStatus'] == 'Active': print \"This LSP is active:", "'application/json' } headers = { 'Content-type': 'application/json' } # r", "active LSPs # usage: python get_active_LSPs.py import json import requests", "'Content-type': 'application/json' } # r = 
requests.get(url, headers=headers, auth=(authuser, authpwd))", "to get active LSPs # usage: python get_active_LSPs.py import json", "if item['operationalStatus'] == 'Active': print \"This LSP is active: \"", "requests.packages.urllib3.disable_warnings(InsecureRequestWarning) def get_token(): url = 'https://' + my_variables_in_yaml['northstar']['ip'] + ':8443/oauth2/token'", "my_variables_in_yaml my_variables_in_yaml=import_variables_from_file() authuser = my_variables_in_yaml['northstar']['username'] authpwd = my_variables_in_yaml['northstar']['password'] url_base =", "Northstar to get active LSPs # usage: python get_active_LSPs.py import", "to Juniper Northstar to get active LSPs # usage: python", "+ '1/topology/1/te-lsps' headers = { 'Accept': 'application/json' } headers =", "import yaml from requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) def get_token(): url", "call to Juniper Northstar to get active LSPs # usage:", "url = url_base + '1/topology/1/te-lsps' headers = { 'Accept': 'application/json'", "{'Authorization':get_token(), 'Accept' : 'application/json', 'Content-Type' : 'application/json'} r = requests.get(url,", "return str('Bearer ' + r.json()['access_token']) def import_variables_from_file(): my_variables_file=open('variables.yml', 'r') my_variables_in_string=my_variables_file.read()", "'1/topology/1/te-lsps' headers = { 'Accept': 'application/json' } headers = {", "= requests.post(url, data=json.dumps(data_to_get_token), auth=(authuser, authpwd), headers=headers, verify=False) return str('Bearer '", "'http://' + my_variables_in_yaml['northstar']['ip'] + ':8091/NorthStar/API/v2/tenant/' url = url_base + '1/topology/1/te-lsps'", "= requests.get(url, headers=headers, auth=(authuser, authpwd)) get_token() headers = {'Authorization':get_token(), 'Accept'", "headers = {'Authorization':get_token(), 'Accept' : 'application/json', 'Content-Type' : 'application/json'} r", 
"'Content-Type' : 'application/json'} r = requests.get(url, headers=headers, verify=False) # type(r.json())", "'r') my_variables_in_string=my_variables_file.read() my_variables_in_yaml=yaml.load(my_variables_in_string) my_variables_file.close() return my_variables_in_yaml my_variables_in_yaml=import_variables_from_file() authuser = my_variables_in_yaml['northstar']['username']", "def import_variables_from_file(): my_variables_file=open('variables.yml', 'r') my_variables_in_string=my_variables_file.read() my_variables_in_yaml=yaml.load(my_variables_in_string) my_variables_file.close() return my_variables_in_yaml my_variables_in_yaml=import_variables_from_file()", "type(r.json()) # pprint(r.json()) # This gives the names of all", "= {\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>} r = requests.post(url, data=json.dumps(data_to_get_token), auth=(authuser, authpwd), headers=headers, verify=False)", "'Accept' : 'application/json', 'Content-Type' : 'application/json'} r = requests.get(url, headers=headers,", "} headers = { 'Content-type': 'application/json' } # r =", "the names of all the LSPs that are active for", "item['operationalStatus'] == 'Active': print \"This LSP is active: \" +", "of all the LSPs that are active for item in", "def get_token(): url = 'https://' + my_variables_in_yaml['northstar']['ip'] + ':8443/oauth2/token' data_to_get_token", "HTTPBasicAuth from pprint import pprint import yaml from requests.packages.urllib3.exceptions import", "{ 'Accept': 'application/json' } headers = { 'Content-type': 'application/json' }", "import HTTPBasicAuth from pprint import pprint import yaml from requests.packages.urllib3.exceptions", "that are active for item in r.json(): if item['operationalStatus'] ==", "get_token() headers = {'Authorization':get_token(), 'Accept' : 'application/json', 'Content-Type' : 'application/json'}", "requests.get(url, headers=headers, auth=(authuser, authpwd)) get_token() headers = {'Authorization':get_token(), 
'Accept' :", "are active for item in r.json(): if item['operationalStatus'] == 'Active':", "rest call to Juniper Northstar to get active LSPs #", "yaml from requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) def get_token(): url =", "requests.get(url, headers=headers, verify=False) # type(r.json()) # pprint(r.json()) # This gives", "this python script makes a rest call to Juniper Northstar", "# pprint(r.json()) # This gives the names of all the", "+ r.json()['access_token']) def import_variables_from_file(): my_variables_file=open('variables.yml', 'r') my_variables_in_string=my_variables_file.read() my_variables_in_yaml=yaml.load(my_variables_in_string) my_variables_file.close() return", "# usage: python get_active_LSPs.py import json import requests from requests.auth", "'application/json'} r = requests.get(url, headers=headers, verify=False) # type(r.json()) # pprint(r.json())", "LSPs # usage: python get_active_LSPs.py import json import requests from", "python get_active_LSPs.py import json import requests from requests.auth import HTTPBasicAuth", "my_variables_in_yaml['northstar']['password'] url_base = 'http://' + my_variables_in_yaml['northstar']['ip'] + ':8091/NorthStar/API/v2/tenant/' url =", "import json import requests from requests.auth import HTTPBasicAuth from pprint", "} # r = requests.get(url, headers=headers, auth=(authuser, authpwd)) get_token() headers", "item in r.json(): if item['operationalStatus'] == 'Active': print \"This LSP", "import_variables_from_file(): my_variables_file=open('variables.yml', 'r') my_variables_in_string=my_variables_file.read() my_variables_in_yaml=yaml.load(my_variables_in_string) my_variables_file.close() return my_variables_in_yaml my_variables_in_yaml=import_variables_from_file() authuser", "{ 'Content-type': 'application/json' } # r = requests.get(url, headers=headers, auth=(authuser,", "from pprint import pprint import yaml from 
requests.packages.urllib3.exceptions import InsecureRequestWarning", "authpwd = my_variables_in_yaml['northstar']['password'] url_base = 'http://' + my_variables_in_yaml['northstar']['ip'] + ':8091/NorthStar/API/v2/tenant/'", "for item in r.json(): if item['operationalStatus'] == 'Active': print \"This", "= 'http://' + my_variables_in_yaml['northstar']['ip'] + ':8091/NorthStar/API/v2/tenant/' url = url_base +", "data_to_get_token = {\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>} r = requests.post(url, data=json.dumps(data_to_get_token), auth=(authuser, authpwd), headers=headers,", "{\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>} r = requests.post(url, data=json.dumps(data_to_get_token), auth=(authuser, authpwd), headers=headers, verify=False) return", "pprint import yaml from requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) def get_token():", "r = requests.get(url, headers=headers, verify=False) # type(r.json()) # pprint(r.json()) #", "verify=False) # type(r.json()) # pprint(r.json()) # This gives the names", "from requests.auth import HTTPBasicAuth from pprint import pprint import yaml", "':8091/NorthStar/API/v2/tenant/' url = url_base + '1/topology/1/te-lsps' headers = { 'Accept':", "authpwd)) get_token() headers = {'Authorization':get_token(), 'Accept' : 'application/json', 'Content-Type' :", "requests.post(url, data=json.dumps(data_to_get_token), auth=(authuser, authpwd), headers=headers, verify=False) return str('Bearer ' +", "url_base = 'http://' + my_variables_in_yaml['northstar']['ip'] + ':8091/NorthStar/API/v2/tenant/' url = url_base", "active for item in r.json(): if item['operationalStatus'] == 'Active': print", "+ my_variables_in_yaml['northstar']['ip'] + ':8091/NorthStar/API/v2/tenant/' url = url_base + '1/topology/1/te-lsps' headers", "my_variables_in_yaml['northstar']['username'] authpwd = 
my_variables_in_yaml['northstar']['password'] url_base = 'http://' + my_variables_in_yaml['northstar']['ip'] +", "gives the names of all the LSPs that are active", "my_variables_file=open('variables.yml', 'r') my_variables_in_string=my_variables_file.read() my_variables_in_yaml=yaml.load(my_variables_in_string) my_variables_file.close() return my_variables_in_yaml my_variables_in_yaml=import_variables_from_file() authuser =", "authpwd), headers=headers, verify=False) return str('Bearer ' + r.json()['access_token']) def import_variables_from_file():", "all the LSPs that are active for item in r.json():", "json import requests from requests.auth import HTTPBasicAuth from pprint import", "python script makes a rest call to Juniper Northstar to", "my_variables_file.close() return my_variables_in_yaml my_variables_in_yaml=import_variables_from_file() authuser = my_variables_in_yaml['northstar']['username'] authpwd = my_variables_in_yaml['northstar']['password']", "= url_base + '1/topology/1/te-lsps' headers = { 'Accept': 'application/json' }", "from requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) def get_token(): url = 'https://'", "my_variables_in_yaml['northstar']['ip'] + ':8443/oauth2/token' data_to_get_token = {\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>} r = requests.post(url, data=json.dumps(data_to_get_token),", "'Accept': 'application/json' } headers = { 'Content-type': 'application/json' } #", "Juniper Northstar to get active LSPs # usage: python get_active_LSPs.py", ": 'application/json', 'Content-Type' : 'application/json'} r = requests.get(url, headers=headers, verify=False)", "= requests.get(url, headers=headers, verify=False) # type(r.json()) # pprint(r.json()) # This", "' + r.json()['access_token']) def import_variables_from_file(): my_variables_file=open('variables.yml', 'r') my_variables_in_string=my_variables_file.read() 
my_variables_in_yaml=yaml.load(my_variables_in_string) my_variables_file.close()", "r = requests.post(url, data=json.dumps(data_to_get_token), auth=(authuser, authpwd), headers=headers, verify=False) return str('Bearer", "+ ':8091/NorthStar/API/v2/tenant/' url = url_base + '1/topology/1/te-lsps' headers = {", "= { 'Content-type': 'application/json' } # r = requests.get(url, headers=headers,", "# type(r.json()) # pprint(r.json()) # This gives the names of", "LSPs that are active for item in r.json(): if item['operationalStatus']", "data=json.dumps(data_to_get_token), auth=(authuser, authpwd), headers=headers, verify=False) return str('Bearer ' + r.json()['access_token'])", "'application/json', 'Content-Type' : 'application/json'} r = requests.get(url, headers=headers, verify=False) #", "makes a rest call to Juniper Northstar to get active", "authuser = my_variables_in_yaml['northstar']['username'] authpwd = my_variables_in_yaml['northstar']['password'] url_base = 'http://' +", "requests from requests.auth import HTTPBasicAuth from pprint import pprint import", "a rest call to Juniper Northstar to get active LSPs", ": 'application/json'} r = requests.get(url, headers=headers, verify=False) # type(r.json()) #", "import pprint import yaml from requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) def", "the LSPs that are active for item in r.json(): if", "+ my_variables_in_yaml['northstar']['ip'] + ':8443/oauth2/token' data_to_get_token = {\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>} r = requests.post(url,", "':8443/oauth2/token' data_to_get_token = {\"grant_type\":\"password\",\"username\":authuser,\"password\":<PASSWORD>} r = requests.post(url, data=json.dumps(data_to_get_token), auth=(authuser, authpwd),", "requests.auth import HTTPBasicAuth from pprint import pprint import yaml from" ]
[ "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "# import pydffi import sys F = pydffi.FFI() CU =", "distributed under the License is distributed on an \"AS IS\"", "permissions and # limitations under the License. # RUN: \"%python\"", "governing permissions and # limitations under the License. # RUN:", "the specific language governing permissions and # limitations under the", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "and # limitations under the License. # RUN: \"%python\" \"%s\"", "except in compliance with the License. # You may obtain", "import pydffi import sys F = pydffi.FFI() CU = F.cdef('''", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "<NAME> <<EMAIL>> # # Licensed under the Apache License, Version", "writing, software # distributed under the License is distributed on", "in writing, software # distributed under the License is distributed", "the License. 
# RUN: \"%python\" \"%s\" # import pydffi import", "int b; } A; ''') assert(CU.types.MyInt == F.Int32Ty) assert(isinstance(CU.types.A, pydffi.StructType))", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "struct { int a; int b; } A; ''') assert(CU.types.MyInt", "# RUN: \"%python\" \"%s\" # import pydffi import sys F", "use this file except in compliance with the License. #", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "limitations under the License. # RUN: \"%python\" \"%s\" # import", "a; int b; } A; ''') assert(CU.types.MyInt == F.Int32Ty) assert(isinstance(CU.types.A,", "Copyright 2018 <NAME> <<EMAIL>> # # Licensed under the Apache", "CONDITIONS OF ANY KIND, either express or implied. # See", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "# You may obtain a copy of the License at", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "under the License is distributed on an \"AS IS\" BASIS,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "License for the specific language governing permissions and # limitations", "import sys F = pydffi.FFI() CU = F.cdef(''' #include <stdint.h>", "License. # RUN: \"%python\" \"%s\" # import pydffi import sys", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "MyInt; typedef struct { int a; int b; } A;", "the License for the specific language governing permissions and #", "# limitations under the License. 
# RUN: \"%python\" \"%s\" #", "(the \"License\"); # you may not use this file except", "Apache License, Version 2.0 (the \"License\"); # you may not", "# you may not use this file except in compliance", "typedef struct { int a; int b; } A; ''')", "either express or implied. # See the License for the", "pydffi import sys F = pydffi.FFI() CU = F.cdef(''' #include", "OR CONDITIONS OF ANY KIND, either express or implied. #", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "F = pydffi.FFI() CU = F.cdef(''' #include <stdint.h> typedef int32_t", "under the License. # RUN: \"%python\" \"%s\" # import pydffi", "the License is distributed on an \"AS IS\" BASIS, #", "in compliance with the License. # You may obtain a", "# Copyright 2018 <NAME> <<EMAIL>> # # Licensed under the", "software # distributed under the License is distributed on an", "= F.cdef(''' #include <stdint.h> typedef int32_t MyInt; typedef struct {", "# # Unless required by applicable law or agreed to", "\"%s\" # import pydffi import sys F = pydffi.FFI() CU", "int a; int b; } A; ''') assert(CU.types.MyInt == F.Int32Ty)", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Version 2.0 (the \"License\"); # you may not use this", "= pydffi.FFI() CU = F.cdef(''' #include <stdint.h> typedef int32_t MyInt;", "pydffi.FFI() CU = F.cdef(''' #include <stdint.h> typedef int32_t MyInt; typedef", "F.cdef(''' #include <stdint.h> typedef int32_t MyInt; typedef struct { int", "law or agreed to in writing, software # distributed under", "2018 <NAME> <<EMAIL>> # # Licensed under the Apache License,", "implied. 
# See the License for the specific language governing", "CU = F.cdef(''' #include <stdint.h> typedef int32_t MyInt; typedef struct", "under the Apache License, Version 2.0 (the \"License\"); # you", "\"License\"); # you may not use this file except in", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "int32_t MyInt; typedef struct { int a; int b; }", "\"%python\" \"%s\" # import pydffi import sys F = pydffi.FFI()", "<stdint.h> typedef int32_t MyInt; typedef struct { int a; int", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "RUN: \"%python\" \"%s\" # import pydffi import sys F =", "{ int a; int b; } A; ''') assert(CU.types.MyInt ==", "sys F = pydffi.FFI() CU = F.cdef(''' #include <stdint.h> typedef", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "to in writing, software # distributed under the License is", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "You may obtain a copy of the License at #", "typedef int32_t MyInt; typedef struct { int a; int b;", "language governing permissions and # limitations under the License. 
#", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "<<EMAIL>> # # Licensed under the Apache License, Version 2.0", "required by applicable law or agreed to in writing, software", "#include <stdint.h> typedef int32_t MyInt; typedef struct { int a;", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "with the License. # You may obtain a copy of", "this file except in compliance with the License. # You", "the Apache License, Version 2.0 (the \"License\"); # you may" ]
[ "self._sim_env.physics.data.qpos[:] = currp + (tg - currp) / 3 self._sim_env.physics.data.qvel[:]", "= currp + (tg - currp) / 3 self._sim_env.physics.data.qvel[:] =", "= self.get_observation() sg.append(gim) elif self.difficulty == 'h': if numg ==", "0]) _, gim = self.get_observation() sg.append(gim) elif self.difficulty == 'm':", "camera_id='fixed') im = cv2.resize(im, (64, 64), interpolation=cv2.INTER_LANCZOS4) return obs, im", "2.0 (the \"License\"); # you may not use this file", "self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) self._sim_env.physics.data.qpos[:] =", "[ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] = 0", "the License. \"\"\"Environment wrapper around the maze navigation environment. \"\"\"", "self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] -", "from __future__ import division from __future__ import print_function import copy", "around the Simple maze environment.\"\"\" def __init__(self, difficulty=None): \"\"\"Initialize the", "permissions and # limitations under the License. \"\"\"Environment wrapper around", "self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) elif self.difficulty ==", "0 self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) elif numg", "# Copyright 2021 The Google Research Authors. 
# # Licensed", "= copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) self._sim_env.physics.data.qpos[:] =", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "self._sim_env.physics.data.qpos[:] = currp + 2 * (tg - currp) /", "self.difficulty == 'm': if numg == 1: self._sim_env.physics.data.qpos[:] = [", "print_function import copy from . import simple_maze import cv2 import", "environment with the specified difficulty.\"\"\" self.difficulty = difficulty self._sim_env =", "== 2: self._sim_env.physics.data.qpos[:] = currp + (tg - currp) /", "action=None): \"\"\"Steps the environment.\"\"\" time_step = self._sim_env.step(action) self._sim_env.physics.data.qvel[:] = 0", "image.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True", "copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) self._sim_env.physics.data.qpos[:]", "0]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _,", "self._sim_env.step(action) self._sim_env.physics.data.qvel[:] = 0 return time_step def get_observation(self): \"\"\"Return image", "difficulty=None): \"\"\"Initialize the environment with the specified difficulty.\"\"\" self.difficulty =", "use this file except in compliance with the License. #", "# limitations under the License. 
\"\"\"Environment wrapper around the maze", "specified difficulty.\"\"\" self.difficulty = difficulty self._sim_env = simple_maze.navigate(difficulty=difficulty) self.stepcount =", "class Environment(object): \"\"\"Wrapper around the Simple maze environment.\"\"\" def __init__(self,", "= currp + (tg - currp) / 2 self._sim_env.physics.data.qvel[:] =", "'h': if numg == 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'],", "= 0 return time_step def get_observation(self): \"\"\"Return image observation.\"\"\" obs", "from __future__ import print_function import copy from . import simple_maze", "numg == 1: self._sim_env.physics.data.qpos[:] = currp + (tg - currp)", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "time_step def get_observation(self): \"\"\"Return image observation.\"\"\" obs = self._sim_env.task.get_observation(self._sim_env.physics) im", "License. # You may obtain a copy of the License", "256, camera_id='fixed') im = cv2.resize(im, (64, 64), interpolation=cv2.INTER_LANCZOS4) return obs,", "= 0 self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) elif", "returs the ground truth sub goal images.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:])", "/ 2 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim =", "under the License is distributed on an \"AS IS\" BASIS,", "_, gim = self.get_observation() sg.append(gim) elif numg == 2: self._sim_env.physics.data.qpos[:]", "License for the specific language governing permissions and # limitations", "and returns the goal image.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv =", "0]) _, gim = self.get_observation() sg.append(gim) elif numg == 2:", "2 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim = self.get_observation()", "self.get_observation() self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] = currv self.step([0, 0]) 
self._sim_env.task.dontreset", "self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:]", "numg == 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y']", "self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] = 0 self.step([0,", "True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) sg = [] if self.difficulty ==", "self.step([0, 0]) _, gim = self.get_observation() self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:]", "gim def get_subgoal_ims(self, numg): \"\"\"Computes and returs the ground truth", "difficulty self._sim_env = simple_maze.navigate(difficulty=difficulty) self.stepcount = 0 def reset(self): \"\"\"Resets", "tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) sg = [] if self.difficulty == 'e':", "2: self._sim_env.physics.data.qpos[:] = currp + (tg - currp) / 3", "self.difficulty == 'h': if numg == 1: self._sim_env.physics.data.qpos[:] = [", "= 0 def reset(self): \"\"\"Resets the environment.\"\"\" self.stepcount = 0", "self._sim_env.task.dontreset = False return gim def get_subgoal_ims(self, numg): \"\"\"Computes and", "self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = [", "obs = self._sim_env.task.get_observation(self._sim_env.physics) im = self._sim_env.physics.render(256, 256, camera_id='fixed') im =", "from __future__ import absolute_import from __future__ import division from __future__", "= 0 self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) sg", "wrapper around the maze navigation environment. 
\"\"\" from __future__ import", "in compliance with the License. # You may obtain a", "copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) self._sim_env.physics.data.qpos[:] = tg", "= self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y']", "software # distributed under the License is distributed on an", "around the maze navigation environment. \"\"\" from __future__ import absolute_import", "self._sim_env.physics.data.qvel[:] = currv self.step([0, 0]) self._sim_env.task.dontreset = False return gim", "__future__ import division from __future__ import print_function import copy from", "copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) sg = []", "time_step = self._sim_env.reset() return time_step def get_goal_im(self): \"\"\"Computes and returns", "Google Research Authors. # # Licensed under the Apache License,", "0 self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] =", "Simple maze environment.\"\"\" def __init__(self, difficulty=None): \"\"\"Initialize the environment with", "self._sim_env.task.get_observation(self._sim_env.physics) im = self._sim_env.physics.render(256, 256, camera_id='fixed') im = cv2.resize(im, (64,", "= currv self.step([0, 0]) self._sim_env.task.dontreset = False return gim def", "= currv self.step([0, 0]) self._sim_env.task.dontreset = False return sg def", "import simple_maze import cv2 import numpy as np class Environment(object):", "currv self.step([0, 0]) self._sim_env.task.dontreset = False return gim def get_subgoal_ims(self,", "OF ANY KIND, either express or implied. 
# See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "0 self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) elif self.difficulty", "License. \"\"\"Environment wrapper around the maze navigation environment. \"\"\" from", "= self._sim_env.step(action) self._sim_env.physics.data.qvel[:] = 0 return time_step def get_observation(self): \"\"\"Return", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "from . import simple_maze import cv2 import numpy as np", "to in writing, software # distributed under the License is", "get_observation(self): \"\"\"Return image observation.\"\"\" obs = self._sim_env.task.get_observation(self._sim_env.physics) im = self._sim_env.physics.render(256,", "self.get_observation() sg.append(gim) elif numg == 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A',", "# See the License for the specific language governing permissions", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "False return gim def get_subgoal_ims(self, numg): \"\"\"Computes and returs the", "maze environment.\"\"\" def __init__(self, difficulty=None): \"\"\"Initialize the environment with the", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "sub goal images.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset", "tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim = self.get_observation()", "self.stepcount = 0 time_step = self._sim_env.reset() return time_step def get_goal_im(self):", "with the License. 
# You may obtain a copy of", "get_subgoal_ims(self, numg): \"\"\"Computes and returs the ground truth sub goal", "0]) _, gim = self.get_observation() sg.append(gim) sg = np.array(sg) self._sim_env.physics.data.qpos[:]", "sg.append(gim) sg = np.array(sg) self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] = currv", "= True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) sg = [] if self.difficulty", "(tg - currp) / 2 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0])", "self._sim_env.physics.data.qpos[:] = currp + (tg - currp) / 2 self._sim_env.physics.data.qvel[:]", "self._sim_env.physics.render(256, 256, camera_id='fixed') im = cv2.resize(im, (64, 64), interpolation=cv2.INTER_LANCZOS4) return", "sg.append(gim) self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] - 0.25]", "self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = currp", "import numpy as np class Environment(object): \"\"\"Wrapper around the Simple", "reset(self): \"\"\"Resets the environment.\"\"\" self.stepcount = 0 time_step = self._sim_env.reset()", "= self.get_observation() self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] = currv self.step([0, 0])", "compliance with the License. 
# You may obtain a copy", "agreed to in writing, software # distributed under the License", "= copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) sg =", "distributed under the License is distributed on an \"AS IS\"", "0 self.step([0, 0]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0 self.step([0,", "state.\"\"\" return self._sim_env.task.is_goal(self._sim_env.physics) def step(self, action=None): \"\"\"Steps the environment.\"\"\" time_step", "sg.append(gim) self._sim_env.physics.data.qpos[:] = currp + 2 * (tg - currp)", "the Simple maze environment.\"\"\" def __init__(self, difficulty=None): \"\"\"Initialize the environment", "express or implied. # See the License for the specific", "= copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True tg =", "except in compliance with the License. # You may obtain", "the specified difficulty.\"\"\" self.difficulty = difficulty self._sim_env = simple_maze.navigate(difficulty=difficulty) self.stepcount", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "gim = self.get_observation() sg.append(gim) sg = np.array(sg) self._sim_env.physics.data.qpos[:] = currp", "writing, software # distributed under the License is distributed on", "\"\"\"Environment wrapper around the maze navigation environment. 
\"\"\" from __future__", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "goal state.\"\"\" return self._sim_env.task.is_goal(self._sim_env.physics) def step(self, action=None): \"\"\"Steps the environment.\"\"\"", "0 return time_step def get_observation(self): \"\"\"Return image observation.\"\"\" obs =", "= 0 self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:]", "the environment with the specified difficulty.\"\"\" self.difficulty = difficulty self._sim_env", "sg.append(gim) elif numg == 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'],", ". import simple_maze import cv2 import numpy as np class", "the environment.\"\"\" time_step = self._sim_env.step(action) self._sim_env.physics.data.qvel[:] = 0 return time_step", "= self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = currp + 2 * (tg", "CONDITIONS OF ANY KIND, either express or implied. # See", "self.difficulty = difficulty self._sim_env = simple_maze.navigate(difficulty=difficulty) self.stepcount = 0 def", "1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] - 0.25]", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "* (tg - currp) / 3 self._sim_env.physics.data.qvel[:] = 0 self.step([0,", "import cv2 import numpy as np class Environment(object): \"\"\"Wrapper around", "import copy from . 
import simple_maze import cv2 import numpy", "'m': if numg == 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'],", "= True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] =", "= copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) sg = [] if self.difficulty == 'e': if", "OR CONDITIONS OF ANY KIND, either express or implied. #", "'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0])", "= self._sim_env.reset() return time_step def get_goal_im(self): \"\"\"Computes and returns the", "the License is distributed on an \"AS IS\" BASIS, #", "= tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) self._sim_env.physics.data.qpos[:] = tg", "gim = self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = currp + 2 *", "self.stepcount = 0 def reset(self): \"\"\"Resets the environment.\"\"\" self.stepcount =", "numg): \"\"\"Computes and returs the ground truth sub goal images.\"\"\"", "currp self._sim_env.physics.data.qvel[:] = currv self.step([0, 0]) self._sim_env.task.dontreset = False return", "def is_goal(self): \"\"\"Checks if the current state is a goal", "self.get_observation() sg.append(gim) elif self.difficulty == 'h': if numg == 1:", "import print_function import copy from . 
import simple_maze import cv2", "currv self.step([0, 0]) self._sim_env.task.dontreset = False return sg def is_goal(self):", "self._sim_env.task.is_goal(self._sim_env.physics) def step(self, action=None): \"\"\"Steps the environment.\"\"\" time_step = self._sim_env.step(action)", "self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = currp + 2 * (tg -", "self.step([0, 0]) self._sim_env.task.dontreset = False return sg def is_goal(self): \"\"\"Checks", "sg def is_goal(self): \"\"\"Checks if the current state is a", "2021 The Google Research Authors. # # Licensed under the", "law or agreed to in writing, software # distributed under", "limitations under the License. \"\"\"Environment wrapper around the maze navigation", "= 0 time_step = self._sim_env.reset() return time_step def get_goal_im(self): \"\"\"Computes", "\"\"\" from __future__ import absolute_import from __future__ import division from", "coding=utf-8 # Copyright 2021 The Google Research Authors. # #", "tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:]", "sg.append(gim) elif numg == 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'],", "= 0 self.step([0, 0]) _, gim = self.get_observation() self._sim_env.physics.data.qpos[:] =", "maze navigation environment. 
\"\"\" from __future__ import absolute_import from __future__", "self._sim_env.physics.data.qvel[:] = 0 return time_step def get_observation(self): \"\"\"Return image observation.\"\"\"", "image observation.\"\"\" obs = self._sim_env.task.get_observation(self._sim_env.physics) im = self._sim_env.physics.render(256, 256, camera_id='fixed')", "is a goal state.\"\"\" return self._sim_env.task.is_goal(self._sim_env.physics) def step(self, action=None): \"\"\"Steps", "def get_observation(self): \"\"\"Return image observation.\"\"\" obs = self._sim_env.task.get_observation(self._sim_env.physics) im =", "_, gim = self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'],", "sg.append(gim) elif self.difficulty == 'h': if numg == 1: self._sim_env.physics.data.qpos[:]", "may obtain a copy of the License at # #", "'y'] - 0.25] self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim", "\"\"\"Initialize the environment with the specified difficulty.\"\"\" self.difficulty = difficulty", "self._sim_env.physics.data.qvel[:] = currv self.step([0, 0]) self._sim_env.task.dontreset = False return sg", "0 self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) sg =", "\"\"\"Computes and returns the goal image.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim = self.get_observation() self._sim_env.physics.data.qpos[:]", "copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2])", "= 0 self.step([0, 0]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0", "may not use this file except in compliance with the", "gim = self.get_observation() sg.append(gim) 
self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A',", "environment.\"\"\" def __init__(self, difficulty=None): \"\"\"Initialize the environment with the specified", "elif numg == 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A',", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "'e': if numg == 1: self._sim_env.physics.data.qpos[:] = currp + (tg", "this file except in compliance with the License. # You", "import absolute_import from __future__ import division from __future__ import print_function", "0]) _, gim = self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A',", "elif numg == 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A',", "goal image.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset =", "== 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] -", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "== 1: self._sim_env.physics.data.qpos[:] = currp + (tg - currp) /", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim", "\"\"\"Wrapper around the Simple maze environment.\"\"\" def __init__(self, difficulty=None): \"\"\"Initialize", "= False return gim def get_subgoal_ims(self, numg): \"\"\"Computes and returs", "elif self.difficulty == 'h': if numg == 1: self._sim_env.physics.data.qpos[:] =", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "import division from __future__ import print_function import copy from .", "currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True tg", "- currp) / 3 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _,", "def get_subgoal_ims(self, numg): \"\"\"Computes and returs the ground truth sub", "1: self._sim_env.physics.data.qpos[:] = currp + (tg - currp) / 2", "self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] = 0 self.step([0,", "gim = self.get_observation() sg.append(gim) elif self.difficulty == 'h': if numg", "the environment.\"\"\" self.stepcount = 0 time_step = self._sim_env.reset() return time_step", "step(self, action=None): \"\"\"Steps the environment.\"\"\" time_step = self._sim_env.step(action) self._sim_env.physics.data.qvel[:] =", "self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] =", "self.get_observation() sg.append(gim) elif self.difficulty == 'm': if numg == 1:", "truth sub goal images.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:])", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "self.get_observation() sg.append(gim) elif numg == 
2: self._sim_env.physics.data.qpos[:] = currp +", "= self.get_observation() sg.append(gim) elif numg == 2: self._sim_env.physics.data.qpos[:] = currp", "return self._sim_env.task.is_goal(self._sim_env.physics) def step(self, action=None): \"\"\"Steps the environment.\"\"\" time_step =", "= simple_maze.navigate(difficulty=difficulty) self.stepcount = 0 def reset(self): \"\"\"Resets the environment.\"\"\"", "environment. \"\"\" from __future__ import absolute_import from __future__ import division", "\"\"\"Resets the environment.\"\"\" self.stepcount = 0 time_step = self._sim_env.reset() return", "environment.\"\"\" time_step = self._sim_env.step(action) self._sim_env.physics.data.qvel[:] = 0 return time_step def", "= tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim =", "time_step = self._sim_env.step(action) self._sim_env.physics.data.qvel[:] = 0 return time_step def get_observation(self):", "or implied. # See the License for the specific language", "2 * (tg - currp) / 3 self._sim_env.physics.data.qvel[:] = 0", "self.get_observation() sg.append(gim) elif numg == 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A',", "self._sim_env = simple_maze.navigate(difficulty=difficulty) self.stepcount = 0 def reset(self): \"\"\"Resets the", "self._sim_env.reset() return time_step def get_goal_im(self): \"\"\"Computes and returns the goal", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "Copyright 2021 The Google Research Authors. 
# # Licensed under", "goal images.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset =", "sg.append(gim) elif self.difficulty == 'm': if numg == 1: self._sim_env.physics.data.qpos[:]", "numg == 2: self._sim_env.physics.data.qpos[:] = currp + (tg - currp)", "returns the goal image.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:])", "self.get_observation() sg.append(gim) sg = np.array(sg) self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] =", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:]", "= currp self._sim_env.physics.data.qvel[:] = currv self.step([0, 0]) self._sim_env.task.dontreset = False", "self._sim_env.task.dontreset = True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) sg = [] if", "= self.get_observation() sg.append(gim) elif self.difficulty == 'm': if numg ==", "tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0 self.step([0,", "_, gim = self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = currp + 2", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "= [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] =", "as np class Environment(object): \"\"\"Wrapper around the Simple maze environment.\"\"\"", "self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] = 0 
self.step([0, 0]) _,", "False return sg def is_goal(self): \"\"\"Checks if the current state", "return time_step def get_goal_im(self): \"\"\"Computes and returns the goal image.\"\"\"", "== 'e': if numg == 1: self._sim_env.physics.data.qpos[:] = currp +", "return time_step def get_observation(self): \"\"\"Return image observation.\"\"\" obs = self._sim_env.task.get_observation(self._sim_env.physics)", "numg == 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y']", "# # Unless required by applicable law or agreed to", "0]) _, gim = self.get_observation() self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] =", "\"\"\"Steps the environment.\"\"\" time_step = self._sim_env.step(action) self._sim_env.physics.data.qvel[:] = 0 return", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "0 def reset(self): \"\"\"Resets the environment.\"\"\" self.stepcount = 0 time_step", "Version 2.0 (the \"License\"); # you may not use this", "_, gim = self.get_observation() self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] = currv", "is_goal(self): \"\"\"Checks if the current state is a goal state.\"\"\"", "= self.get_observation() sg.append(gim) sg = np.array(sg) self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:]", "= self._sim_env.task.get_observation(self._sim_env.physics) im = self._sim_env.physics.render(256, 256, camera_id='fixed') im = cv2.resize(im,", "get_goal_im(self): \"\"\"Computes and returns the goal image.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:])", "= difficulty self._sim_env = simple_maze.navigate(difficulty=difficulty) self.stepcount = 0 def reset(self):", "gim = self.get_observation() sg.append(gim) elif numg == 2: self._sim_env.physics.data.qpos[:] =", "gim = self.get_observation() self._sim_env.physics.data.qpos[:] = currp 
self._sim_env.physics.data.qvel[:] = currv self.step([0,", "__future__ import absolute_import from __future__ import division from __future__ import", "implied. # See the License for the specific language governing", "'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0])", "\"\"\"Checks if the current state is a goal state.\"\"\" return", "under the Apache License, Version 2.0 (the \"License\"); # you", "self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) sg = np.array(sg)", "and returs the ground truth sub goal images.\"\"\" currp =", "copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) sg = [] if self.difficulty == 'e': if numg", "= copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0])", "- currp) / 2 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _,", "by applicable law or agreed to in writing, software #", "= self.get_observation() sg.append(gim) elif numg == 2: self._sim_env.physics.data.qpos[:] = [", "(tg - currp) / 3 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0])", "the ground truth sub goal images.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv", "time_step def get_goal_im(self): \"\"\"Computes and returns the goal image.\"\"\" currp", "# coding=utf-8 # Copyright 2021 The Google Research Authors. 
#", "np.array(sg) self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] = currv self.step([0, 0]) self._sim_env.task.dontreset", "np class Environment(object): \"\"\"Wrapper around the Simple maze environment.\"\"\" def", "2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] - 0.25]", "current state is a goal state.\"\"\" return self._sim_env.task.is_goal(self._sim_env.physics) def step(self,", "__init__(self, difficulty=None): \"\"\"Initialize the environment with the specified difficulty.\"\"\" self.difficulty", "= [] if self.difficulty == 'e': if numg == 1:", "self._sim_env.task.dontreset = False return sg def is_goal(self): \"\"\"Checks if the", "cv2 import numpy as np class Environment(object): \"\"\"Wrapper around the", "if numg == 1: self._sim_env.physics.data.qpos[:] = currp + (tg -", "a goal state.\"\"\" return self._sim_env.task.is_goal(self._sim_env.physics) def step(self, action=None): \"\"\"Steps the", "return sg def is_goal(self): \"\"\"Checks if the current state is", "= [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] =", "[] if self.difficulty == 'e': if numg == 1: self._sim_env.physics.data.qpos[:]", "0]) self._sim_env.task.dontreset = False return sg def is_goal(self): \"\"\"Checks if", "currp + 2 * (tg - currp) / 3 self._sim_env.physics.data.qvel[:]", "0]) _, gim = self.get_observation() sg.append(gim) self._sim_env.physics.data.qpos[:] = currp +", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "the current state is a goal state.\"\"\" return self._sim_env.task.is_goal(self._sim_env.physics) def", "Unless required by applicable law or agreed to in writing,", "simple_maze.navigate(difficulty=difficulty) self.stepcount = 0 def reset(self): \"\"\"Resets the environment.\"\"\" self.stepcount", 
"language governing permissions and # limitations under the License. \"\"\"Environment", "the specific language governing permissions and # limitations under the", "self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] = currv self.step([0, 0]) self._sim_env.task.dontreset =", "<gh_stars>1000+ # coding=utf-8 # Copyright 2021 The Google Research Authors.", "0]) self._sim_env.task.dontreset = False return gim def get_subgoal_ims(self, numg): \"\"\"Computes", "applicable law or agreed to in writing, software # distributed", "if numg == 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A',", "return gim def get_subgoal_ims(self, numg): \"\"\"Computes and returs the ground", "difficulty.\"\"\" self.difficulty = difficulty self._sim_env = simple_maze.navigate(difficulty=difficulty) self.stepcount = 0", "1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] - 0.25]", "with the specified difficulty.\"\"\" self.difficulty = difficulty self._sim_env = simple_maze.navigate(difficulty=difficulty)", "[ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] = 0", "= self._sim_env.physics.render(256, 256, camera_id='fixed') im = cv2.resize(im, (64, 64), interpolation=cv2.INTER_LANCZOS4)", "in writing, software # distributed under the License is distributed", "== 'h': if numg == 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A',", "def step(self, action=None): \"\"\"Steps the environment.\"\"\" time_step = self._sim_env.step(action) self._sim_env.physics.data.qvel[:]", "currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True tg = 
copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) sg", "__future__ import print_function import copy from . import simple_maze import", "governing permissions and # limitations under the License. \"\"\"Environment wrapper", "+ 2 * (tg - currp) / 3 self._sim_env.physics.data.qvel[:] =", "self.step([0, 0]) self._sim_env.task.dontreset = False return gim def get_subgoal_ims(self, numg):", "currp) / 3 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim", "the maze navigation environment. \"\"\" from __future__ import absolute_import from", "Authors. # # Licensed under the Apache License, Version 2.0", "observation.\"\"\" obs = self._sim_env.task.get_observation(self._sim_env.physics) im = self._sim_env.physics.render(256, 256, camera_id='fixed') im", "self.difficulty == 'e': if numg == 1: self._sim_env.physics.data.qpos[:] = currp", "elif numg == 2: self._sim_env.physics.data.qpos[:] = currp + (tg -", "currp) / 2 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim", "im = self._sim_env.physics.render(256, 256, camera_id='fixed') im = cv2.resize(im, (64, 64),", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "== 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] -", "copy from . import simple_maze import cv2 import numpy as", "ground truth sub goal images.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv =", "# You may obtain a copy of the License at", "state is a goal state.\"\"\" return self._sim_env.task.is_goal(self._sim_env.physics) def step(self, action=None):", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "0 time_step = self._sim_env.reset() return time_step def get_goal_im(self): \"\"\"Computes and", "division from __future__ import print_function import copy from . 
import", "numg == 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y']", "numpy as np class Environment(object): \"\"\"Wrapper around the Simple maze", "2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] - 0.25]", "currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) self._sim_env.physics.data.qpos[:]", "self.step([0, 0]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0])", "environment.\"\"\" self.stepcount = 0 time_step = self._sim_env.reset() return time_step def", "+ (tg - currp) / 3 self._sim_env.physics.data.qvel[:] = 0 self.step([0,", "elif self.difficulty == 'm': if numg == 1: self._sim_env.physics.data.qpos[:] =", "_, gim = self.get_observation() sg.append(gim) sg = np.array(sg) self._sim_env.physics.data.qpos[:] =", "the License for the specific language governing permissions and #", "under the License. \"\"\"Environment wrapper around the maze navigation environment.", "images.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset = True", "+ (tg - currp) / 2 self._sim_env.physics.data.qvel[:] = 0 self.step([0,", "0]) _, gim = self.get_observation() sg.append(gim) elif self.difficulty == 'h':", "_, gim = self.get_observation() sg.append(gim) elif self.difficulty == 'h': if", "Apache License, Version 2.0 (the \"License\"); # you may not", "simple_maze import cv2 import numpy as np class Environment(object): \"\"\"Wrapper", "either express or implied. 
# See the License for the", "def get_goal_im(self): \"\"\"Computes and returns the goal image.\"\"\" currp =", "Environment(object): \"\"\"Wrapper around the Simple maze environment.\"\"\" def __init__(self, difficulty=None):", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] - 0.25] self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _,", "== 'm': if numg == 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A',", "= np.array(sg) self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] = currv self.step([0, 0])", "- 0.25] self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim =", "if self.difficulty == 'e': if numg == 1: self._sim_env.physics.data.qpos[:] =", "def reset(self): \"\"\"Resets the environment.\"\"\" self.stepcount = 0 time_step =", "if numg == 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A',", "numg == 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y']", "= False return sg def is_goal(self): \"\"\"Checks if the current", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "self._sim_env.task.dontreset = True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:]", "/ 3 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim =", "navigation environment. 
\"\"\" from __future__ import absolute_import from __future__ import", "sg = np.array(sg) self._sim_env.physics.data.qpos[:] = currp self._sim_env.physics.data.qvel[:] = currv self.step([0,", "self.step([0, 0]) _, gim = self.get_observation() sg.append(gim) elif numg ==", "and # limitations under the License. \"\"\"Environment wrapper around the", "True tg = copy.deepcopy(self._sim_env.physics.named.data.geom_xpos['target'][:2]) self._sim_env.physics.data.qpos[:] = tg self._sim_env.physics.data.qvel[:] = 0", "absolute_import from __future__ import division from __future__ import print_function import", "0.25] self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim = self.get_observation()", "\"\"\"Return image observation.\"\"\" obs = self._sim_env.task.get_observation(self._sim_env.physics) im = self._sim_env.physics.render(256, 256,", "= currp + 2 * (tg - currp) / 3", "0 self.step([0, 0]) _, gim = self.get_observation() self._sim_env.physics.data.qpos[:] = currp", "sg = [] if self.difficulty == 'e': if numg ==", "\"License\"); # you may not use this file except in", "\"\"\"Computes and returs the ground truth sub goal images.\"\"\" currp", "if the current state is a goal state.\"\"\" return self._sim_env.task.is_goal(self._sim_env.physics)", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "== 2: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall1A', 'x'], self._sim_env.physics.named.model.geom_pos['wall1A', 'y'] -", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim = self.get_observation() sg.append(gim)", "gim = self.get_observation() sg.append(gim) elif self.difficulty == 'm': if numg", "The Google Research Authors. 
# # Licensed under the Apache", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "sg.append(gim) elif numg == 2: self._sim_env.physics.data.qpos[:] = currp + (tg", "currp + (tg - currp) / 2 self._sim_env.physics.data.qvel[:] = 0", "3 self._sim_env.physics.data.qvel[:] = 0 self.step([0, 0]) _, gim = self.get_observation()", "You may obtain a copy of the License at #", "the goal image.\"\"\" currp = copy.deepcopy(self._sim_env.physics.data.qpos[:]) currv = copy.deepcopy(self._sim_env.physics.data.qvel[:]) self._sim_env.task.dontreset", "currp + (tg - currp) / 3 self._sim_env.physics.data.qvel[:] = 0", "== 1: self._sim_env.physics.data.qpos[:] = [ self._sim_env.physics.named.model.geom_pos['wall2A', 'x'], self._sim_env.physics.named.model.geom_pos['wall2A', 'y'] -", "the Apache License, Version 2.0 (the \"License\"); # you may", "def __init__(self, difficulty=None): \"\"\"Initialize the environment with the specified difficulty.\"\"\"", "Research Authors. # # Licensed under the Apache License, Version", "_, gim = self.get_observation() sg.append(gim) elif self.difficulty == 'm': if" ]
[ "Django 3.2.5 on 2021-07-11 23:51 from django.db import migrations, models", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies", "[ ('trips', '0003_alter_trip_state'), ] operations = [ migrations.CreateModel( name='Invoice', fields=[", "migrations.CreateModel( name='Invoice', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('serie', models.CharField(max_length=4)),", "[ migrations.CreateModel( name='Invoice', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('serie',", "primary_key=True, serialize=False, verbose_name='ID')), ('serie', models.CharField(max_length=4)), ('number', models.CharField(max_length=8)), ('tax_amount', models.FloatField(default=0.0)), ('base_amount',", "('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('serie', models.CharField(max_length=4)), ('number', models.CharField(max_length=8)), ('tax_amount',", "23:51 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('trips', '0003_alter_trip_state'),", "('number', models.CharField(max_length=8)), ('tax_amount', models.FloatField(default=0.0)), ('base_amount', models.FloatField()), ('trip_id', models.ForeignKey(db_column='trip_id', on_delete=django.db.models.deletion.PROTECT, to='trips.trip')),", "models.FloatField(default=0.0)), ('base_amount', models.FloatField()), ('trip_id', models.ForeignKey(db_column='trip_id', on_delete=django.db.models.deletion.PROTECT, to='trips.trip')), ], ), ]", "fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('serie', models.CharField(max_length=4)), ('number', models.CharField(max_length=8)),", 
"dependencies = [ ('trips', '0003_alter_trip_state'), ] operations = [ migrations.CreateModel(", "] operations = [ migrations.CreateModel( name='Invoice', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True,", "models.CharField(max_length=4)), ('number', models.CharField(max_length=8)), ('tax_amount', models.FloatField(default=0.0)), ('base_amount', models.FloatField()), ('trip_id', models.ForeignKey(db_column='trip_id', on_delete=django.db.models.deletion.PROTECT,", "Generated by Django 3.2.5 on 2021-07-11 23:51 from django.db import", "class Migration(migrations.Migration): dependencies = [ ('trips', '0003_alter_trip_state'), ] operations =", "3.2.5 on 2021-07-11 23:51 from django.db import migrations, models import", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies =", "django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('trips', '0003_alter_trip_state'), ] operations", "import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('trips', '0003_alter_trip_state'), ]", "= [ ('trips', '0003_alter_trip_state'), ] operations = [ migrations.CreateModel( name='Invoice',", "('tax_amount', models.FloatField(default=0.0)), ('base_amount', models.FloatField()), ('trip_id', models.ForeignKey(db_column='trip_id', on_delete=django.db.models.deletion.PROTECT, to='trips.trip')), ], ),", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [", "<gh_stars>0 # Generated by Django 3.2.5 on 2021-07-11 23:51 from", "2021-07-11 23:51 from django.db import migrations, models import django.db.models.deletion class", "Migration(migrations.Migration): dependencies = [ ('trips', '0003_alter_trip_state'), ] operations = [", "verbose_name='ID')), ('serie', models.CharField(max_length=4)), ('number', models.CharField(max_length=8)), ('tax_amount', models.FloatField(default=0.0)), 
('base_amount', models.FloatField()), ('trip_id',", "name='Invoice', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('serie', models.CharField(max_length=4)), ('number',", "models.CharField(max_length=8)), ('tax_amount', models.FloatField(default=0.0)), ('base_amount', models.FloatField()), ('trip_id', models.ForeignKey(db_column='trip_id', on_delete=django.db.models.deletion.PROTECT, to='trips.trip')), ],", "serialize=False, verbose_name='ID')), ('serie', models.CharField(max_length=4)), ('number', models.CharField(max_length=8)), ('tax_amount', models.FloatField(default=0.0)), ('base_amount', models.FloatField()),", "('serie', models.CharField(max_length=4)), ('number', models.CharField(max_length=8)), ('tax_amount', models.FloatField(default=0.0)), ('base_amount', models.FloatField()), ('trip_id', models.ForeignKey(db_column='trip_id',", "on 2021-07-11 23:51 from django.db import migrations, models import django.db.models.deletion", "'0003_alter_trip_state'), ] operations = [ migrations.CreateModel( name='Invoice', fields=[ ('id', models.BigAutoField(auto_created=True,", "= [ migrations.CreateModel( name='Invoice', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "('trips', '0003_alter_trip_state'), ] operations = [ migrations.CreateModel( name='Invoice', fields=[ ('id',", "operations = [ migrations.CreateModel( name='Invoice', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False,", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('trips',", "by Django 3.2.5 on 2021-07-11 23:51 from django.db import migrations,", "# Generated by Django 3.2.5 on 2021-07-11 23:51 from django.db", "models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('serie', models.CharField(max_length=4)), ('number', 
models.CharField(max_length=8)), ('tax_amount', models.FloatField(default=0.0))," ]
[ "self, strategy: contract.Contract, ): \"\"\"Orchestration function that harvests outstanding rewards.", "confirm_transaction from src.utils import get_abi from src.discord_utils import get_hash_from_failed_tx_error from", "Decimal from time import sleep import requests from hexbytes import", "to None. harvested (Decimal, optional): Amount of Sushi harvested. Defaults", "== \"tend\": self.logger.info(f\"estimated gas fee: {self.__estimate_tend_gas(address)}\") return self.__build_tend_transaction(address, options) elif", ") # Times of last harvest if self.chain in [Network.Ethereum,", "self, strategy: contract.Contract, harvest_interval_threshold: int = MAX_TIME_BETWEEN_HARVESTS, ) -> bool:", "of Sushi harvested. Defaults to None. \"\"\" try: tx_hash, max_target_block", "gas_price = int(1.1 * self.web3.eth.gas_price) # Estimated gas price +", "for calling harvestNoReturn\" ) want_address = strategy.functions.want().call() want = self.web3.eth.contract(", "# min ratio of want to total vault AUM required", "1.1), \"gwei\") elif self.chain in [Network.Arbitrum, Network.Fantom]: gas_price = int(1.1", "import sleep import requests from hexbytes import HexBytes from web3", "True if time since last harvest is > harvest_interval_threshold, else", "strategy (contract) Raises: Exception: If we have an issue sending", "for i in range(1, NUM_FLASHBOTS_BUNDLES + 1): self.web3.flashbots.send_bundle( bundle, target_block_number=block_number", "of want to total vault AUM required to harvest NUM_FLASHBOTS_BUNDLES", "self.logger.info(f\"Bundle broadcasted at {max_target_block}\") except ValueError as e: self.logger.error(f\"Error in", "tx_hash = self.__send_tend_tx(strategy) succeeded, _ = confirm_transaction(self.web3, tx_hash) if succeeded:", "for ACL if not self.__is_keeper_whitelisted(\"tend\"): raise ValueError(\"Keeper ACL is not", "on eth and then send transaction to Discord for monitoring", "from decimal import Decimal from time import sleep import requests", "import hours 
from src.misc_utils import seconds_to_blocks from src.tx_utils import get_effective_gas_price", "= True, ): \"\"\"Private function to create, broadcast, confirm tx", "(contract, optional): Defaults to None. strategy_name (str, optional): Defaults to", "options[\"maxPriorityFeePerGas\"] = get_priority_fee(self.web3) options[\"maxFeePerGas\"] = self.__get_effective_gas_price() else: options[\"gasPrice\"] = self.__get_effective_gas_price()", "strategy: contract = None, strategy_name: str = None, ): try:", "get prices currency = BASE_CURRENCIES[self.chain] if self.chain == Network.Fantom: price_per_want", "get_hash_from_failed_tx_error( e, \"Harvest\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash, max_target_block", "tx_hash, max_target_block def __send_tend_tx(self, strategy: contract) -> HexBytes: \"\"\"Sends transaction", "tx_hash = self.__send_harvest_mta_tx(voter_proxy) succeeded, _ = confirm_transaction(self.web3, tx_hash) if succeeded:", ") finally: return tx_hash, max_target_block def __send_tend_tx(self, strategy: contract) ->", "price_per_want = get_token_price(want.address, currency, self.chain) self.logger.info(f\"price per want: {price_per_want} {currency}\")", "self.keeper_acl.functions.harvestNoReturn( strategy_address ).buildTransaction(options) def __build_tend_transaction(self, strategy_address: str, options: dict) ->", "self.__get_effective_gas_price() if function == \"harvest\": estimated_gas = self.__estimate_harvest_gas(address, returns) elif", "Args: strategy (contract) Raises: Exception: If we have an issue", "don't double harvest self.update_last_harvest_time(strategy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash,", "function == \"tend\": key = self.keeper_acl.functions.TENDER_ROLE().call() elif function == \"rewards_manager\":", "{self.__estimate_harvest_gas(address, returns)}\" ) return self.__build_harvest_transaction(address, returns, options) elif function ==", 
"which chain we're harvesting. EIP-1559 requires different handling for ETH", "), \"from\": self.keeper_address, \"gas\": GAS_LIMITS[self.chain], } if self.chain == Network.Ethereum:", "between harvests for the supplied strategy and returns true if", ") else: # Don't care about poly/arbitrum self.last_harvest_times = {}", "badger api to get prices currency = BASE_CURRENCIES[self.chain] if self.chain", "url=self.discord_url, ) elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash,", "tx Returns: dict: tx dictionary \"\"\" options = { \"nonce\":", "if should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_no_return( self, strategy:", "we're harvesting. EIP-1559 requires different handling for ETH txs than", "keeper_address=self.keeper_address, ) def __process_harvest_mta( self, voter_proxy: contract, ): \"\"\"Private function", "HexBytes(0) try: tx = self.__build_transaction(strategy.address, function=\"tend\") signed_tx = self.web3.eth.account.sign_transaction( tx,", "of 0x00. Returns: HexBytes: Transaction hash for transaction that was", "return self.keeper_acl.functions.harvestNoReturn( strategy_address ).buildTransaction(options) def __build_tend_transaction(self, strategy_address: str, options: dict)", "src.harvester import IHarvester from src.misc_utils import hours from src.misc_utils import", "config.enums import Network from src.harvester import IHarvester from src.misc_utils import", "sent. \"\"\" tx_hash = HexBytes(0) try: tx = self.__build_transaction(strategy.address, function=\"tend\")", "): \"\"\"Orchestration function that harvests outstanding rewards. Args: strategy (contract)", "{price_per_want} {currency}\") self.logger.info(f\"want gained: {want_gained}\") if type(want_gained) is list: want_gained", "# If successful, update last harvest harvest time self.update_last_harvest_time(voter_proxy.address) gas_price_of_tx", "ETH node for confirmation. 
Args: voter_proxy (contract) Raises: Exception: If", "elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url,", "= requests.get(\"https://gasstation-mainnet.matic.network\").json() gas_price = self.web3.toWei(int(response.get(\"fast\") * 1.1), \"gwei\") elif self.chain", "is not whitelisted for {strategy_name}\") want_address = strategy.functions.want().call() want =", "EIP-1559 gas_price = get_effective_gas_price(self.web3) return gas_price def update_last_harvest_time(self, strategy_address: str):", "\"\"\"Private function to create, broadcast, confirm tx on eth and", "for confirmation. Args: strategy (contract) Raises: Exception: If we have", "to make sure we don't double harvest self.update_last_harvest_time(strategy.address) gas_price_of_tx =", "per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, returns=False) self.logger.info(f\"estimated gas", "every hour should_harvest = self.is_profitable() self.logger.info(f\"Should we harvest: {should_harvest}\") if", "try: tx = self.__build_transaction(voter_proxy.address, function=\"harvestMta\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key", "tx_type=\"Harvest MTA\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash !=", "get_token_price(want.address, currency, self.chain) self.logger.info(f\"price per want: {price_per_want} {currency}\") self.logger.info(f\"want gained:", "= self.__build_transaction(strategy.address, function=\"tend\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash", "get_token_price( want.address, currency, self.chain, use_staging=True ) else: price_per_want = get_token_price(want.address,", "handle profit estimation # current_price_eth = self.get_current_rewards_price() # self.logger.info(f\"current rewards", "= 
get_token_price(want.address, currency, self.chain) self.logger.info(f\"price per want: {price_per_want} {currency}\") self.logger.info(f\"want", "\"\"\" options = { \"nonce\": self.web3.eth.get_transaction_count( self.keeper_address, \"pending\" ), \"from\":", "\"harvest\": estimated_gas = self.__estimate_harvest_gas(address, returns) elif function == \"tend\": estimated_gas", "= os.getenv(\"ETH_USD_CHAINLINK\"), use_flashbots: bool = False, discord_url: str = None,", "not in [Network.Ethereum, Network.Fantom]: return True try: last_harvest = self.last_harvest_times[strategy.address]", "send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except Exception as", "which is measured in seconds Args: strategy (contract): Vault strategy", "Exception: If we have an issue sending transaction (unable to", "= int(1.1 * self.web3.eth.gas_price) # Estimated gas price + buffer", "self.logger.error(f\"Error processing harvest tx: {e}\") send_error_to_discord( strategy_name, \"Harvest\", error=e, chain=self.chain,", "tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) else: send_error_to_discord( strategy_name, \"Harvest\", tx_hash=tx_hash, message=msg,", "\"Harvest MTA\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __send_harvest_tx(self, strategy: contract,", "transaction that was sent. \"\"\" max_target_block = None tx_hash =", "is not whitelisted for calling harvestMta\") gas_fee = self.estimate_gas_fee(voter_proxy.address, function=\"harvestMta\")", "for monitoring Args: strategy (contract, optional): Defaults to None. 
strategy_name", "str, returns: bool) -> Decimal: if returns: estimated_gas_to_harvest = self.keeper_acl.functions.harvest(", "str) -> Decimal: return Decimal( self.keeper_acl.functions.tend(strategy_address).estimateGas( {\"from\": self.keeper_address} ) )", "strategy_name: str = None, harvested: Decimal = None, returns: bool", "* self.web3.eth.gas_price) # Estimated gas price + buffer elif self.chain", "strategy.functions.getName().call() # TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvestNoReturn\"): raise", "get_effective_gas_price(self.web3) return gas_price def update_last_harvest_time(self, strategy_address: str): self.last_harvest_times[strategy_address] = self.web3.eth.get_block(\"latest\")[", "time since last harvest is > harvest_interval_threshold, else False \"\"\"", "ACL is not whitelisted for calling harvestMta\") gas_fee = self.estimate_gas_fee(voter_proxy.address,", "strategy=strategy, strategy_name=strategy_name, ) def harvest_no_return( self, strategy: contract, ): strategy_name", "Decimal(estimated_gas_to_harvest) def __estimate_tend_gas(self, strategy_address: str) -> Decimal: return Decimal( self.keeper_acl.functions.tend(strategy_address).estimateGas(", "rewards price per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, returns=False)", "= None tx_hash = HexBytes(0) try: tx = self.__build_transaction(strategy.address, returns=returns)", "self.web3 = web3 self.keeper_key = keeper_key self.keeper_address = keeper_address self.keeper_acl:", "= confirm_transaction(self.web3, tx_hash) if succeeded: # If successful, update last", "__send_harvest_tx(self, strategy: contract, returns: bool = True) -> HexBytes: \"\"\"Sends", "self.chain = chain self.web3 = web3 self.keeper_key = keeper_key self.keeper_address", "succeeded: gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain ) self.logger.info(f\"got", "if pending self.update_last_harvest_time(strategy.address) 
send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, )", "hours(120) HARVEST_THRESHOLD = 0.0005 # min ratio of want to", "gas fee: {self.__estimate_tend_gas(address)}\") return self.__build_tend_transaction(address, options) elif function == \"harvestMta\":", "= True) -> HexBytes: \"\"\"Sends transaction to ETH node for", "self.update_last_harvest_time(voter_proxy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain ) self.logger.info(f\"got", "def harvest_mta( self, voter_proxy: contract, ): # TODO: update for", "return self.__build_tend_transaction(address, options) elif function == \"harvestMta\": self.logger.info( f\"estimated gas", "= None, strategy_name: str = None, harvested: Decimal = None,", "estimation # current_price_eth = self.get_current_rewards_price() # self.logger.info(f\"current rewards price per", "in sending harvestMta tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest", "poly/arbitrum self.last_harvest_times = {} self.use_flashbots = use_flashbots self.discord_url = discord_url", "= want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") # TODO: figure out how", "gas_price def update_last_harvest_time(self, strategy_address: str): self.last_harvest_times[strategy_address] = self.web3.eth.get_block(\"latest\")[ \"timestamp\" ]", "Decimal: current_gas_price = self.__get_effective_gas_price() if function == \"harvest\": estimated_gas =", "self.__build_harvest_transaction(address, returns, options) elif function == \"tend\": self.logger.info(f\"estimated gas fee:", "longer than the supplied harvest_interval_threshold which is measured in seconds", "msg = confirm_transaction( self.web3, tx_hash, max_block=max_target_block ) if succeeded: #", "transaction to ETH node for confirmation. 
Args: strategy (contract) Raises:", "{should_harvest_mta}\") if should_harvest_mta: self.__process_harvest_mta(voter_proxy) def tend(self, strategy: contract): strategy_name =", "(ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, returns=False) self.logger.info(f\"estimated gas cost: {gas_fee}\")", "-> dict: return self.keeper_acl.functions.harvestMta( voter_proxy_address ).buildTransaction(options) def estimate_gas_fee( self, address:", "import get_token_price from src.discord_utils import send_error_to_discord from src.discord_utils import send_success_to_discord", "return current_time - last_harvest > harvest_interval_threshold except KeyError: return True", "double harvest self.update_last_harvest_time(strategy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain", "{vault_balance}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_harvest( strategy=strategy,", "# should_harvest = want_to_harvest / vault_balance >= HARVEST_THRESHOLD return True", "should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_no_return( self, strategy: contract,", "Decimal: if returns: estimated_gas_to_harvest = self.keeper_acl.functions.harvest( strategy_address ).estimateGas({\"from\": self.keeper_address}) else:", "= self.__get_effective_gas_price() if function == \"harvest\": estimated_gas = self.__estimate_harvest_gas(address, returns)", "else: # Don't care about poly/arbitrum self.last_harvest_times = {} self.use_flashbots", "strategy: contract = None, strategy_name: str = None, harvested: Decimal", "max_target_block = self.__send_harvest_tx( strategy, returns=returns ) succeeded, msg = confirm_transaction(", "{self.__estimate_harvest_mta_gas(address)}\" ) return self.__build_harvest_mta_transaction(address, options) def __build_harvest_transaction( self, strategy_address: str,", "if not 
self.use_flashbots: self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) else: bundle = [ {\"signed_transaction\": signed_tx.rawTransaction},", "vault_balance >= HARVEST_THRESHOLD return True def __is_keeper_whitelisted(self, function: str) ->", "def __process_tend( self, strategy: contract = None, strategy_name: str =", "gained: {want_gained}\") if type(want_gained) is list: want_gained = 0 return", "profit estimation # current_price_eth = self.get_current_rewards_price() # self.logger.info(f\"current rewards price", "max_target_block def __send_tend_tx(self, strategy: contract) -> HexBytes: \"\"\"Sends transaction to", "token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\")", "\"nonce\": self.web3.eth.get_transaction_count( self.keeper_address, \"pending\" ), \"from\": self.keeper_address, \"gas\": GAS_LIMITS[self.chain], }", "get_priority_fee from src.web3_utils import confirm_transaction from src.utils import get_abi from", "abi=get_abi(self.chain, \"erc20\"), ) want_gained = self.keeper_acl.functions.harvest(strategy.address).call( {\"from\": self.keeper_address} ) #", "ACL is not whitelisted for calling harvest\") want_address = strategy.functions.want().call()", "harvestNoReturn\" ) want_address = strategy.functions.want().call() want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain,", "address=want_address, abi=get_abi(self.chain, \"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\")", "contract to use to build harvest tx Returns: dict: tx", "= self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except", "tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, 
)", "= HexBytes(0) try: tx = self.__build_transaction(strategy.address, returns=returns) signed_tx = self.web3.eth.account.sign_transaction(", "to Discord for monitoring Args: strategy (contract, optional): Defaults to", "per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, function=\"tend\") self.logger.info(f\"estimated gas", "else: bundle = [ {\"signed_transaction\": signed_tx.rawTransaction}, ] block_number = self.web3.eth.block_number", "TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvest\"): raise ValueError(\"Keeper ACL", "= self.__send_harvest_tx( strategy, returns=returns ) succeeded, msg = confirm_transaction( self.web3,", "NUM_FLASHBOTS_BUNDLES self.logger.info(f\"Bundle broadcasted at {max_target_block}\") except ValueError as e: self.logger.error(f\"Error", "signed_tx.hash if not self.use_flashbots: self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) else: bundle = [ {\"signed_transaction\":", "import send_success_to_discord logging.basicConfig(level=logging.INFO) MAX_TIME_BETWEEN_HARVESTS = hours(120) HARVEST_THRESHOLD = 0.0005 #", "except ValueError as e: self.logger.error(f\"Error in sending tend tx: {e}\")", "function calls, False otherwise. \"\"\" if function in [\"harvest\", \"harvestMta\"]:", "harvest tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest\", chain=self.chain, keeper_address=self.keeper_address", "ACL if not self.__is_keeper_whitelisted(\"harvest\"): raise ValueError(\"Keeper ACL is not whitelisted", "to None. strategy_name (str, optional): Defaults to None. harvested (Decimal,", "harvest self.update_last_harvest_time(strategy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain )", "optional): Defaults to None. 
harvested (Decimal, optional): Amount of Sushi", ") except Exception as e: self.logger.error(f\"Error processing harvest tx: {e}\")", "e: self.logger.error(f\"Error processing harvest tx: {e}\") send_error_to_discord( strategy_name, \"Harvest\", error=e,", "from src.utils import get_abi from src.discord_utils import get_hash_from_failed_tx_error from src.web3_utils", "return a tx_hash of 0x00. Returns: HexBytes: Transaction hash for", "contract): strategy_name = strategy.functions.getName().call() # TODO: update for ACL if", "change is > 0.05% of total vault assets # should_harvest", "calling tend\") # TODO: figure out how to handle profit", "self.update_last_harvest_time(strategy.address) send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) else: send_error_to_discord(", "whitelisted to make function calls, False otherwise. \"\"\" if function", "strategy=strategy, strategy_name=strategy_name, ) def harvest_mta( self, voter_proxy: contract, ): #", "that was sent. \"\"\" tx_hash = HexBytes(0) try: tx =", "of last harvest if self.chain in [Network.Ethereum, Network.Fantom]: self.last_harvest_times =", ") elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, chain=self.chain,", "ratio of want to total vault AUM required to harvest", "on which chain we're harvesting. EIP-1559 requires different handling for", "self.chain == Network.Ethereum: # EIP-1559 gas_price = get_effective_gas_price(self.web3) return gas_price", "txs than the other EVM chains. 
Args: contract (contract): contract", "self.__is_keeper_whitelisted(\"harvestMta\"): raise ValueError(\"Keeper ACL is not whitelisted for calling harvestMta\")", "we have an issue sending transaction (unable to communicate with", "os.getenv(\"KEEPER_KEY\"), base_oracle_address: str = os.getenv(\"ETH_USD_CHAINLINK\"), use_flashbots: bool = False, discord_url:", "Transaction hash for transaction that was sent. \"\"\" max_target_block =", "= self.is_profitable() self.logger.info(f\"Should we call harvestMta: {should_harvest_mta}\") if should_harvest_mta: self.__process_harvest_mta(voter_proxy)", "def estimate_gas_fee( self, address: str, returns: bool = True, function:", "MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"] ), abi=get_abi(self.chain, \"rewards_manager\"), ) if not self.__is_keeper_whitelisted(\"rewards_manager\"): raise ValueError(f\"Keeper", "hash for transaction that was sent. \"\"\" tx_hash = HexBytes(0)", "strategy_address: str, options: dict) -> dict: return self.keeper_acl.functions.tend(strategy_address).buildTransaction( options )", "strategy, returns=returns ) succeeded, msg = confirm_transaction( self.web3, tx_hash, max_block=max_target_block", "return self.keeper_acl.functions.tend(strategy_address).buildTransaction( options ) def __build_harvest_mta_transaction( self, voter_proxy_address: str, options:", "Contract from config.constants import BASE_CURRENCIES from config.constants import GAS_LIMITS from", "src.tx_utils import get_effective_gas_price from src.tx_utils import get_gas_price_of_tx from src.tx_utils import", "if type(want_gained) is list: want_gained = 0 return price_per_want *", "None, ): try: tx_hash = self.__send_tend_tx(strategy) succeeded, _ = confirm_transaction(self.web3,", "10 ** want.functions.decimals().call() ) self.logger.info(f\"estimated want change: {want_to_harvest}\") # TODO:", "in sending tend tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Tend\",", "Args: contract (contract): contract to use to build 
harvest tx", "measured in seconds Args: strategy (contract): Vault strategy web3 contract", "return price_per_want * want_gained def is_profitable(self) -> bool: # TODO:", "function=\"tend\") self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_tend( strategy=strategy, strategy_name=strategy_name, ) def", "= None, ): try: tx_hash = self.__send_tend_tx(strategy) succeeded, _ =", "transaction (unable to communicate with node, etc.) we log the", "= 6 class GeneralHarvester(IHarvester): def __init__( self, chain: Network =", "Sushi harvested. Defaults to None. \"\"\" try: tx_hash, max_target_block =", "time in seconds that is acceptable to not have harvested", "= self.estimate_gas_fee(voter_proxy.address, function=\"harvestMta\") self.logger.info(f\"estimated gas cost: {gas_fee}\") should_harvest_mta = self.is_profitable()", "create, broadcast, confirm tx on eth and then send transaction", "HexBytes(0): send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except Exception", "{strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except Exception as e: self.logger.error(f\"Error", "for transaction that was sent. \"\"\" tx_hash = HexBytes(0) try:", "def __estimate_harvest_mta_gas(self, voter_proxy_address: str) -> Decimal: return Decimal( self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas( {\"from\":", "is acceptable to not have harvested within. Defaults to MAX_TIME_BETWEEN_HARVESTS.", "using is whitelisted for the strategy. Returns: bool: True if", "import os from decimal import Decimal from time import sleep", "e: self.logger.error(f\"Error processing tend tx: {e}\") send_error_to_discord( strategy_name, \"Tend\", error=e,", "outstanding rewards. 
Args: strategy (contract) Raises: ValueError: If the keeper", "self.chain not in [Network.Ethereum, Network.Fantom]: return True try: last_harvest =", "function: str) -> bool: \"\"\"Checks if the bot we're using", "= get_hash_from_failed_tx_error( e, \"Harvest MTA\", chain=self.chain, keeper_address=self.keeper_address ) finally: return", "= want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") want_to_harvest = ( self.estimate_harvest_amount(strategy) /", "str = os.getenv(\"KEEPER_KEY\"), base_oracle_address: str = os.getenv(\"ETH_USD_CHAINLINK\"), use_flashbots: bool =", "0 return price_per_want * want_gained def is_profitable(self) -> bool: #", "If the keeper isn't whitelisted, throw an error and alert", "from src.discord_utils import send_success_to_discord logging.basicConfig(level=logging.INFO) MAX_TIME_BETWEEN_HARVESTS = hours(120) HARVEST_THRESHOLD =", "not self.__is_keeper_whitelisted(\"tend\"): raise ValueError(\"Keeper ACL is not whitelisted for calling", "Decimal: return Decimal( self.keeper_acl.functions.tend(strategy_address).estimateGas( {\"from\": self.keeper_address} ) ) def __estimate_harvest_mta_gas(self,", "web3: Web3 = None, keeper_acl: str = os.getenv(\"KEEPER_ACL\"), keeper_address: str", "ETH node for confirmation. 
Args: strategy (contract) Raises: Exception: If", "self.__is_keeper_whitelisted(\"harvest\"): raise ValueError(\"Keeper ACL is not whitelisted for calling harvest\")", "Args: voter_proxy (contract) Raises: Exception: If we have an issue", "is > 0.05% of total vault assets # should_harvest =", "MTA\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0):", "return Decimal( self.keeper_acl.functions.tend(strategy_address).estimateGas( {\"from\": self.keeper_address} ) ) def __estimate_harvest_mta_gas(self, voter_proxy_address:", "voter_proxy: contract, ): \"\"\"Private function to create, broadcast, confirm tx", "than the supplied harvest_interval_threshold which is measured in seconds Args:", "= use_flashbots self.discord_url = discord_url def is_time_to_harvest( self, strategy: contract.Contract,", "import send_error_to_discord from src.discord_utils import send_success_to_discord logging.basicConfig(level=logging.INFO) MAX_TIME_BETWEEN_HARVESTS = hours(120)", "* want_gained def is_profitable(self) -> bool: # TODO: Implement this", "transaction depending on which chain we're harvesting. EIP-1559 requires different", "tx_hash, max_target_block = self.__send_harvest_tx( strategy, returns=returns ) succeeded, msg =", "Defaults to None. strategy_name (str, optional): Defaults to None. 
harvested", "if returns: estimated_gas_to_harvest = self.keeper_acl.functions.harvest( strategy_address ).estimateGas({\"from\": self.keeper_address}) else: estimated_gas_to_harvest", "HexBytes(0): if not self.use_flashbots: # And if pending self.update_last_harvest_time(strategy.address) send_success_to_discord(", "options) elif function == \"tend\": self.logger.info(f\"estimated gas fee: {self.__estimate_tend_gas(address)}\") return", ") return self.__build_harvest_mta_transaction(address, options) def __build_harvest_transaction( self, strategy_address: str, returns:", "- last_harvest > harvest_interval_threshold except KeyError: return True def harvest(", "{e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Tend\", chain=self.chain, keeper_address=self.keeper_address ) finally:", "(contract): Vault strategy web3 contract object harvest_interval_threshold (int, optional): Amount", "self.__is_keeper_whitelisted(\"harvestNoReturn\"): raise ValueError( \"Keeper ACL is not whitelisted for calling", "= 0.0005 # min ratio of want to total vault", "\"\", \"Harvest MTA\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __send_harvest_tx(self, strategy:", "{\"from\": self.keeper_address} ) # call badger api to get prices", "estimated_gas) def __estimate_harvest_gas(self, strategy_address: str, returns: bool) -> Decimal: if", "HexBytes: \"\"\"Sends transaction to ETH node for confirmation. 
Args: voter_proxy", "self.base_usd_oracle: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(base_oracle_address), abi=get_abi(self.chain, \"oracle\"), ) # Times", "= self.__estimate_harvest_gas(address, returns) elif function == \"tend\": estimated_gas = self.__estimate_tend_gas(address)", "str = os.getenv(\"KEEPER_ACL\"), keeper_address: str = os.getenv(\"KEEPER_ADDRESS\"), keeper_key: str =", "ValueError: If the keeper isn't whitelisted, throw an error and", "different handling for ETH txs than the other EVM chains.", "= discord_url def is_time_to_harvest( self, strategy: contract.Contract, harvest_interval_threshold: int =", "src.misc_utils import seconds_to_blocks from src.tx_utils import get_effective_gas_price from src.tx_utils import", "{\"signed_transaction\": signed_tx.rawTransaction}, ] block_number = self.web3.eth.block_number for i in range(1,", "self.chain == Network.Polygon: response = requests.get(\"https://gasstation-mainnet.matic.network\").json() gas_price = self.web3.toWei(int(response.get(\"fast\") *", "bool = True, function: str = \"harvest\" ) -> Decimal:", "-> bool: \"\"\"Checks if the bot we're using is whitelisted", "ValueError(\"Keeper ACL is not whitelisted for calling tend\") # TODO:", "chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0): if not self.use_flashbots:", "options: dict ) -> dict: if returns: return self.keeper_acl.functions.harvest(strategy_address).buildTransaction( options", "import Decimal from time import sleep import requests from hexbytes", "tend(self, strategy: contract): strategy_name = strategy.functions.getName().call() # TODO: update for", "whitelisted for calling harvestNoReturn\" ) want_address = strategy.functions.want().call() want =", "ACL is not whitelisted for calling harvestNoReturn\" ) want_address =", "harvest tx Returns: dict: tx dictionary \"\"\" options = {", "to build harvest tx Returns: dict: tx dictionary \"\"\" options", "False \"\"\" # Only care about 
harvest gas costs on", "to MAX_TIME_BETWEEN_HARVESTS. Returns: bool: True if time since last harvest", "dict ) -> dict: if returns: return self.keeper_acl.functions.harvest(strategy_address).buildTransaction( options )", "{currency}\") self.logger.info(f\"want gained: {want_gained}\") if type(want_gained) is list: want_gained =", "finally: return tx_hash def __build_transaction( self, address: str, returns: bool", "self.web3.eth.contract( address=self.web3.toChecksumAddress( MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"] ), abi=get_abi(self.chain, \"rewards_manager\"), ) if not self.__is_keeper_whitelisted(\"rewards_manager\"):", "vault assets # should_harvest = want_to_harvest / vault_balance >= HARVEST_THRESHOLD", "self.logger.info(f\"estimated gas fee: {self.__estimate_tend_gas(address)}\") return self.__build_tend_transaction(address, options) elif function ==", "Raises: ValueError: If the keeper isn't whitelisted, throw an error", "for calling harvestMta\") gas_fee = self.estimate_gas_fee(voter_proxy.address, function=\"harvestMta\") self.logger.info(f\"estimated gas cost:", "\"Harvest\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash, max_target_block def __send_tend_tx(self,", ").estimateGas({\"from\": self.keeper_address}) return Decimal(estimated_gas_to_harvest) def __estimate_tend_gas(self, strategy_address: str) -> Decimal:", "gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx,", "int: if self.chain == Network.Polygon: response = requests.get(\"https://gasstation-mainnet.matic.network\").json() gas_price =", "as e: self.logger.error(f\"Error in sending tend tx: {e}\") tx_hash =", "self.__build_transaction(strategy.address, function=\"tend\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash =", "harvest harvest time self.update_last_harvest_time(voter_proxy.address) gas_price_of_tx = 
get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash,", "harvestMta tx: {e}\") send_error_to_discord( \"\", \"Harvest MTA\", error=e, chain=self.chain, keeper_address=self.keeper_address,", ") except Exception as e: self.logger.error(f\"Error processing tend tx: {e}\")", "except KeyError: return True def harvest( self, strategy: contract.Contract, ):", "_ = confirm_transaction(self.web3, tx_hash) if succeeded: # If successful, update", "signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash if", "self.last_harvest_times = {} self.use_flashbots = use_flashbots self.discord_url = discord_url def", "= self.web3.eth.contract( address=want_address, abi=get_abi(self.chain, \"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault", "def __build_harvest_mta_transaction( self, voter_proxy_address: str, options: dict ) -> dict:", "Args: strategy (contract) Raises: ValueError: If the keeper isn't whitelisted,", "want_address = strategy.functions.want().call() want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain, \"erc20\"), )", "self.__estimate_harvest_mta_gas(address) return Decimal(current_gas_price * estimated_gas) def __estimate_harvest_gas(self, strategy_address: str, returns:", "{gas_price_of_tx}\") send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif", "tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash !=", "if should_harvest_mta: self.__process_harvest_mta(voter_proxy) def tend(self, strategy: contract): strategy_name = strategy.functions.getName().call()", "harvest_interval_threshold (int, optional): Amount of time in seconds that is", "# TODO: Implement this # harvest if ideal want change", "import HexBytes from web3 import Web3 from web3 import contract", 
"\"Harvest\", tx_hash=tx_hash, message=msg, chain=self.chain, keeper_address=self.keeper_address, ) except Exception as e:", "/ vault_balance >= HARVEST_THRESHOLD return True def __is_keeper_whitelisted(self, function: str)", "harvested. Defaults to None. \"\"\" try: tx_hash, max_target_block = self.__send_harvest_tx(", "not whitelisted for {strategy_name}\") want_address = strategy.functions.want().call() want = self.web3.eth.contract(", "type(want_gained) is list: want_gained = 0 return price_per_want * want_gained", ") else: send_error_to_discord( strategy_name, \"Harvest\", tx_hash=tx_hash, message=msg, chain=self.chain, keeper_address=self.keeper_address, )", "\"\"\"Builds transaction depending on which chain we're harvesting. EIP-1559 requires", "== \"tend\": key = self.keeper_acl.functions.TENDER_ROLE().call() elif function == \"rewards_manager\": key", "key = self.keeper_acl.functions.KEEPER_ROLE().call() return self.keeper_acl.functions.hasRole(key, self.keeper_address).call() def __process_tend( self, strategy:", "chain self.web3 = web3 self.keeper_key = keeper_key self.keeper_address = keeper_address", "self.logger.error(f\"Error processing tend tx: {e}\") send_error_to_discord( strategy_name, \"Tend\", error=e, chain=self.chain,", "function: str = \"harvest\" ) -> dict: \"\"\"Builds transaction depending", "# for now we'll just harvest every hour should_harvest =", "max_block=max_target_block ) if succeeded: # If successful, update last harvest", "self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError as e: self.logger.error(f\"Error in sending tend tx:", "to harvest NUM_FLASHBOTS_BUNDLES = 6 class GeneralHarvester(IHarvester): def __init__( self,", "HARVEST_THRESHOLD return True def __is_keeper_whitelisted(self, function: str) -> bool: \"\"\"Checks", "the keeper isn't whitelisted, throw an error and alert user.", "= confirm_transaction( self.web3, tx_hash, max_block=max_target_block ) if succeeded: # If", "MTA\", 
chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash def __build_transaction( self,", "hash for transaction that was sent. \"\"\" max_target_block = None", "not whitelisted for calling harvest\") want_address = strategy.functions.want().call() want =", "gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") # for now", "{gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif", "Defaults to None. \"\"\" try: tx_hash, max_target_block = self.__send_harvest_tx( strategy,", "Decimal( self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas( {\"from\": self.keeper_address} ) ) def __get_effective_gas_price(self) -> int:", "harvest # time to make sure we don't double harvest", "want_gained = 0 return price_per_want * want_gained def is_profitable(self) ->", "have an issue sending transaction (unable to communicate with node,", "should_harvest_mta = self.is_profitable() self.logger.info(f\"Should we call harvestMta: {should_harvest_mta}\") if should_harvest_mta:", "= { \"nonce\": self.web3.eth.get_transaction_count( self.keeper_address, \"pending\" ), \"from\": self.keeper_address, \"gas\":", "# current_price_eth = self.get_current_rewards_price() # self.logger.info(f\"current rewards price per token", "fee: {self.__estimate_tend_gas(address)}\") return self.__build_tend_transaction(address, options) elif function == \"harvestMta\": self.logger.info(", "self.__process_tend( strategy=strategy, strategy_name=strategy_name, ) def tend_then_harvest(self, strategy: contract): self.tend(strategy) sleep(60)", "want: {price_per_want} {currency}\") self.logger.info(f\"want gained: {want_gained}\") if type(want_gained) is list:", "self.use_flashbots = use_flashbots self.discord_url = discord_url def is_time_to_harvest( self, strategy:", "Transaction hash for transaction that was 
sent. \"\"\" tx_hash =", "tx_hash, max_block=max_target_block ) if succeeded: # If successful, update last", "MULTICHAIN_CONFIG from config.enums import Network from src.harvester import IHarvester from", "except ValueError as e: self.logger.error(f\"Error in sending harvestMta tx: {e}\")", "has been longer than the supplied harvest_interval_threshold which is measured", "self.web3, tx_hash, max_block=max_target_block ) if succeeded: # If successful, update", "signed_tx.hash self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError as e: self.logger.error(f\"Error in sending harvestMta", "{should_harvest}\") if should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_no_return( self,", "# self.logger.info(f\"current rewards price per token (ETH): {current_price_eth}\") gas_fee =", "seconds_to_blocks from src.tx_utils import get_effective_gas_price from src.tx_utils import get_gas_price_of_tx from", "import confirm_transaction from src.utils import get_abi from src.discord_utils import get_hash_from_failed_tx_error", "= MAX_TIME_BETWEEN_HARVESTS, ) -> bool: \"\"\"Calculates the time between harvests", "\"\"\" max_target_block = None tx_hash = HexBytes(0) try: tx =", "= self.is_profitable() self.logger.info(f\"Should we harvest: {should_harvest}\") if should_harvest: self.__process_harvest( strategy=strategy,", "other EVM chains. 
Args: contract (contract): contract to use to", "options[\"maxFeePerGas\"] = self.__get_effective_gas_price() else: options[\"gasPrice\"] = self.__get_effective_gas_price() if function ==", "target_block_number=block_number + i ) max_target_block = block_number + NUM_FLASHBOTS_BUNDLES self.logger.info(f\"Bundle", "if ideal want change is > 0.05% of total vault", "self, strategy: contract = None, strategy_name: str = None, harvested:", "key = self.keeper_acl.functions.TENDER_ROLE().call() elif function == \"rewards_manager\": key = self.keeper_acl.functions.KEEPER_ROLE().call()", "self, strategy: contract, ): strategy_name = strategy.functions.getName().call() self.keeper_acl = self.web3.eth.contract(", "harvest_no_return( self, strategy: contract, ): strategy_name = strategy.functions.getName().call() # TODO:", "chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest( self, strategy: contract = None,", "function == \"harvestMta\": self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_mta_gas(address)}\" ) return", "use to build harvest tx Returns: dict: tx dictionary \"\"\"", "tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0): if", "options: dict) -> dict: return self.keeper_acl.functions.tend(strategy_address).buildTransaction( options ) def __build_harvest_mta_transaction(", "\"pending\" ), \"from\": self.keeper_address, \"gas\": GAS_LIMITS[self.chain], } if self.chain ==", "tx_hash = get_hash_from_failed_tx_error( e, \"Harvest MTA\", chain=self.chain, keeper_address=self.keeper_address ) finally:", "contract \"\"\" try: tx_hash = self.__send_harvest_mta_tx(voter_proxy) succeeded, _ = confirm_transaction(self.web3,", "= self.web3.toWei(int(response.get(\"fast\") * 1.1), \"gwei\") elif self.chain in [Network.Arbitrum, Network.Fantom]:", "except ValueError as e: self.logger.error(f\"Error in sending harvest tx: {e}\")", "= logging.getLogger(__name__) self.chain = chain 
self.web3 = web3 self.keeper_key =", "function == \"harvest\": estimated_gas = self.__estimate_harvest_gas(address, returns) elif function ==", "MAX_TIME_BETWEEN_HARVESTS. Returns: bool: True if time since last harvest is", "strategy_name=strategy_name, ) def tend_then_harvest(self, strategy: contract): self.tend(strategy) sleep(60) self.harvest(strategy) def", "= get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain ) self.logger.info(f\"got gas price", "voter_proxy_address: str) -> Decimal: return Decimal( self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas( {\"from\": self.keeper_address} )", "= strategy.functions.getName().call() self.keeper_acl = self.web3.eth.contract( address=self.web3.toChecksumAddress( MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"] ), abi=get_abi(self.chain, \"rewards_manager\"),", "and then send transaction to Discord for monitoring Args: voter_proxy", "last_harvest = self.last_harvest_times[strategy.address] current_time = self.web3.eth.get_block(\"latest\")[\"timestamp\"] self.logger.info( f\"Time since last", "strategy.functions.getName().call() # TODO: update for ACL if not self.__is_keeper_whitelisted(\"tend\"): raise", ") def __build_harvest_mta_transaction( self, voter_proxy_address: str, options: dict ) ->", "web3.contract import Contract from config.constants import BASE_CURRENCIES from config.constants import", "as e: self.logger.error(f\"Error processing harvest tx: {e}\") send_error_to_discord( strategy_name, \"Harvest\",", "get_hash_from_failed_tx_error( e, \"Tend\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash def", "strategy_name=strategy_name, ) def harvest_mta( self, voter_proxy: contract, ): # TODO:", "self.logger.info(f\"price per want: {price_per_want} {currency}\") self.logger.info(f\"want gained: {want_gained}\") if type(want_gained)", "to ETH node for confirmation. 
Args: voter_proxy (contract) Raises: Exception:", "= want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas", "bool: True if time since last harvest is > harvest_interval_threshold,", "ACL if not self.__is_keeper_whitelisted(\"harvestNoReturn\"): raise ValueError( \"Keeper ACL is not", "optional): Amount of Sushi harvested. Defaults to None. \"\"\" try:", "chain=self.chain, url=self.discord_url, ) except Exception as e: self.logger.error(f\"Error processing tend", "token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, returns=False) self.logger.info(f\"estimated gas cost:", "self.web3.eth.get_transaction_count( self.keeper_address, \"pending\" ), \"from\": self.keeper_address, \"gas\": GAS_LIMITS[self.chain], } if", "= HexBytes(0) try: tx = self.__build_transaction(voter_proxy.address, function=\"harvestMta\") signed_tx = self.web3.eth.account.sign_transaction(", "str, returns: bool, options: dict ) -> dict: if returns:", "# Don't care about poly/arbitrum self.last_harvest_times = {} self.use_flashbots =", "logging.getLogger(__name__) self.chain = chain self.web3 = web3 self.keeper_key = keeper_key", "from src.discord_utils import get_hash_from_failed_tx_error from src.web3_utils import get_last_harvest_times from src.token_utils", "current_time = self.web3.eth.get_block(\"latest\")[\"timestamp\"] self.logger.info( f\"Time since last harvest: {(current_time -", "depending on which chain we're harvesting. 
EIP-1559 requires different handling", "want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain, \"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call()", "# And if pending self.update_last_harvest_time(strategy.address) send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, chain=self.chain,", "not self.use_flashbots: self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) else: bundle = [ {\"signed_transaction\": signed_tx.rawTransaction}, ]", "__process_tend( self, strategy: contract = None, strategy_name: str = None,", "\"tend\": key = self.keeper_acl.functions.TENDER_ROLE().call() elif function == \"rewards_manager\": key =", "elif tx_hash != HexBytes(0): if not self.use_flashbots: # And if", "MAX_TIME_BETWEEN_HARVESTS = hours(120) HARVEST_THRESHOLD = 0.0005 # min ratio of", "self.use_flashbots: # And if pending self.update_last_harvest_time(strategy.address) send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash,", "\"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") # TODO:", "update for ACL if not self.__is_keeper_whitelisted(\"tend\"): raise ValueError(\"Keeper ACL is", "= 0 return price_per_want * want_gained def is_profitable(self) -> bool:", ") ) def __get_effective_gas_price(self) -> int: if self.chain == Network.Polygon:", "= get_effective_gas_price(self.web3) return gas_price def update_last_harvest_time(self, strategy_address: str): self.last_harvest_times[strategy_address] =", "chain=self.chain, url=self.discord_url, ) else: send_error_to_discord( strategy_name, \"Harvest\", tx_hash=tx_hash, message=msg, chain=self.chain,", "True def __is_keeper_whitelisted(self, function: str) -> bool: \"\"\"Checks if the", "Web3 from web3 import contract from web3.contract import Contract from", "tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash if not self.use_flashbots: 
self.web3.eth.send_raw_transaction(signed_tx.rawTransaction)", "tx_hash=tx_hash, message=msg, chain=self.chain, keeper_address=self.keeper_address, ) except Exception as e: self.logger.error(f\"Error", ") self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Harvest {strategy_name}\",", "if self.chain in [Network.Ethereum, Network.Fantom]: self.last_harvest_times = get_last_harvest_times( self.web3, self.keeper_acl,", "== \"harvestMta\": self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_mta_gas(address)}\" ) return self.__build_harvest_mta_transaction(address,", "harvestMta: {should_harvest_mta}\") if should_harvest_mta: self.__process_harvest_mta(voter_proxy) def tend(self, strategy: contract): strategy_name", "function == \"harvestMta\": estimated_gas = self.__estimate_harvest_mta_gas(address) return Decimal(current_gas_price * estimated_gas)", "chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash, max_target_block def __send_tend_tx(self, strategy:", "confirm_transaction(self.web3, tx_hash) if succeeded: # If successful, update last harvest", "import Web3 from web3 import contract from web3.contract import Contract", "None, keeper_acl: str = os.getenv(\"KEEPER_ACL\"), keeper_address: str = os.getenv(\"KEEPER_ADDRESS\"), keeper_key:", "self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_rewards_manager( self, strategy: contract, ):", "node for confirmation. 
Args: strategy (contract) Raises: Exception: If we", "tx_hash = HexBytes(0) try: tx = self.__build_transaction(voter_proxy.address, function=\"harvestMta\") signed_tx =", "keeper_address=self.keeper_address, ) def __send_harvest_tx(self, strategy: contract, returns: bool = True)", "if not self.__is_keeper_whitelisted(\"tend\"): raise ValueError(\"Keeper ACL is not whitelisted for", "= [ {\"signed_transaction\": signed_tx.rawTransaction}, ] block_number = self.web3.eth.block_number for i", "+ NUM_FLASHBOTS_BUNDLES self.logger.info(f\"Bundle broadcasted at {max_target_block}\") except ValueError as e:", "tend tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Tend\", chain=self.chain, keeper_address=self.keeper_address", "== Network.Ethereum: # EIP-1559 gas_price = get_effective_gas_price(self.web3) return gas_price def", "list: want_gained = 0 return price_per_want * want_gained def is_profitable(self)", "update last harvest harvest time self.update_last_harvest_time(voter_proxy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3,", "cost: {gas_fee}\") self.__process_tend( strategy=strategy, strategy_name=strategy_name, ) def tend_then_harvest(self, strategy: contract):", "True, function: str = \"harvest\" ) -> dict: \"\"\"Builds transaction", "tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, )", "bool: \"\"\"Calculates the time between harvests for the supplied strategy", "options ) def __build_harvest_mta_transaction( self, voter_proxy_address: str, options: dict )", "= self.web3.eth.contract( address=self.web3.toChecksumAddress(keeper_acl), abi=get_abi(self.chain, \"keeper_acl\"), ) self.base_usd_oracle: Contract = self.web3.eth.contract(", "optional): Amount of time in seconds that is acceptable to", "= BASE_CURRENCIES[self.chain] if self.chain == Network.Fantom: price_per_want = get_token_price( want.address,", "node for 
confirmation. Args: voter_proxy (contract) Raises: Exception: If we", "__build_harvest_transaction( self, strategy_address: str, returns: bool, options: dict ) ->", "from web3 import Web3 from web3 import contract from web3.contract", "gas cost: {gas_fee}\") # for now we'll just harvest every", "figure out how to handle profit estimation # current_price_eth =", "chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=f\"Tend {strategy_name}\",", "tx = self.__build_transaction(voter_proxy.address, function=\"harvestMta\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key )", ") want_gained = self.keeper_acl.functions.harvest(strategy.address).call( {\"from\": self.keeper_address} ) # call badger", "block_number + NUM_FLASHBOTS_BUNDLES self.logger.info(f\"Bundle broadcasted at {max_target_block}\") except ValueError as", "rewards price per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, function=\"tend\")", "strategy_name, \"Harvest\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest_mta( self, voter_proxy:", "decimal import Decimal from time import sleep import requests from", "as e: self.logger.error(f\"Error processing harvestMta tx: {e}\") send_error_to_discord( \"\", \"Harvest", "from src.web3_utils import get_last_harvest_times from src.token_utils import get_token_price from src.discord_utils", ") return current_time - last_harvest > harvest_interval_threshold except KeyError: return", "cost: {gas_fee}\") should_harvest_mta = self.is_profitable() self.logger.info(f\"Should we call harvestMta: {should_harvest_mta}\")", "return Decimal(current_gas_price * estimated_gas) def __estimate_harvest_gas(self, strategy_address: str, returns: bool)", "and then send transaction to Discord for monitoring Args: strategy", "tx_hash = get_hash_from_failed_tx_error( e, \"Tend\", chain=self.chain, 
keeper_address=self.keeper_address ) finally: return", "[Network.Ethereum, Network.Fantom]: self.last_harvest_times = get_last_harvest_times( self.web3, self.keeper_acl, start_block=self.web3.eth.block_number - seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS),", "self.keeper_acl.functions.tend(strategy_address).buildTransaction( options ) def __build_harvest_mta_transaction( self, voter_proxy_address: str, options: dict", "= self.web3.eth.contract( address=self.web3.toChecksumAddress(base_oracle_address), abi=get_abi(self.chain, \"oracle\"), ) # Times of last", "# EIP-1559 gas_price = get_effective_gas_price(self.web3) return gas_price def update_last_harvest_time(self, strategy_address:", "= self.web3.eth.block_number for i in range(1, NUM_FLASHBOTS_BUNDLES + 1): self.web3.flashbots.send_bundle(", "__estimate_harvest_gas(self, strategy_address: str, returns: bool) -> Decimal: if returns: estimated_gas_to_harvest", "time self.update_last_harvest_time(voter_proxy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain )", "want_to_harvest = ( self.estimate_harvest_amount(strategy) / 10 ** want.functions.decimals().call() ) self.logger.info(f\"estimated", "self.web3.eth.block_number for i in range(1, NUM_FLASHBOTS_BUNDLES + 1): self.web3.flashbots.send_bundle( bundle,", "= get_priority_fee(self.web3) options[\"maxFeePerGas\"] = self.__get_effective_gas_price() else: options[\"gasPrice\"] = self.__get_effective_gas_price() if", "options ) else: return self.keeper_acl.functions.harvestNoReturn( strategy_address ).buildTransaction(options) def __build_tend_transaction(self, strategy_address:", "bot is whitelisted to make function calls, False otherwise. 
\"\"\"", "tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0): send_success_to_discord(", "keeper_key self.keeper_address = keeper_address self.keeper_acl: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(keeper_acl), abi=get_abi(self.chain,", "function: str = \"harvest\" ) -> Decimal: current_gas_price = self.__get_effective_gas_price()", "os.getenv(\"KEEPER_ACL\"), keeper_address: str = os.getenv(\"KEEPER_ADDRESS\"), keeper_key: str = os.getenv(\"KEEPER_KEY\"), base_oracle_address:", "<gh_stars>0 import logging import os from decimal import Decimal from", "bool) -> Decimal: if returns: estimated_gas_to_harvest = self.keeper_acl.functions.harvest( strategy_address ).estimateGas({\"from\":", "address=self.web3.toChecksumAddress(base_oracle_address), abi=get_abi(self.chain, \"oracle\"), ) # Times of last harvest if", "class GeneralHarvester(IHarvester): def __init__( self, chain: Network = Network.Ethereum, web3:", "= self.get_current_rewards_price() # self.logger.info(f\"current rewards price per token (ETH): {current_price_eth}\")", "True, function: str = \"harvest\" ) -> Decimal: current_gas_price =", "the error and return a tx_hash of 0x00. 
Returns: HexBytes:", "last harvest harvest time self.update_last_harvest_time(voter_proxy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle,", "!= HexBytes(0): if not self.use_flashbots: # And if pending self.update_last_harvest_time(strategy.address)", "ideal want change is > 0.05% of total vault assets", "seconds Args: strategy (contract): Vault strategy web3 contract object harvest_interval_threshold", "estimate_gas_fee( self, address: str, returns: bool = True, function: str", "Network.Fantom]: self.last_harvest_times = get_last_harvest_times( self.web3, self.keeper_acl, start_block=self.web3.eth.block_number - seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS), chain=self.chain,", "in sending harvest tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest\",", "is not whitelisted for calling harvest\") want_address = strategy.functions.want().call() want", "balance: {vault_balance}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_harvest(", "self.logger.error(f\"Error in sending tend tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e,", "{} self.use_flashbots = use_flashbots self.discord_url = discord_url def is_time_to_harvest( self,", "\"harvestMta\": self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_mta_gas(address)}\" ) return self.__build_harvest_mta_transaction(address, options)", "chain=self.chain, keeper_address=self.keeper_address, ) except Exception as e: self.logger.error(f\"Error processing harvest", "not self.use_flashbots: # And if pending self.update_last_harvest_time(strategy.address) send_success_to_discord( tx_type=f\"Harvest {strategy_name}\",", "returns: estimated_gas_to_harvest = self.keeper_acl.functions.harvest( strategy_address ).estimateGas({\"from\": self.keeper_address}) else: estimated_gas_to_harvest =", "bool = False, discord_url: str = None, ): self.logger =", "self.logger.info(f\"got gas price of tx: 
{gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash,", "vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") want_to_harvest = ( self.estimate_harvest_amount(strategy)", "= get_token_price( want.address, currency, self.chain, use_staging=True ) else: price_per_want =", "calling harvest\") want_address = strategy.functions.want().call() want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain,", "self.update_last_harvest_time(strategy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain ) self.logger.info(f\"got", "range(1, NUM_FLASHBOTS_BUNDLES + 1): self.web3.flashbots.send_bundle( bundle, target_block_number=block_number + i )", ") def __send_harvest_tx(self, strategy: contract, returns: bool = True) ->", "(ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") #", "buffer elif self.chain == Network.Ethereum: # EIP-1559 gas_price = get_effective_gas_price(self.web3)", "import logging import os from decimal import Decimal from time", "# Only care about harvest gas costs on eth if", "url=self.discord_url, ) except Exception as e: self.logger.error(f\"Error processing harvestMta tx:", "gas_price = self.web3.toWei(int(response.get(\"fast\") * 1.1), \"gwei\") elif self.chain in [Network.Arbitrum,", "str = \"harvest\" ) -> dict: \"\"\"Builds transaction depending on", "dictionary \"\"\" options = { \"nonce\": self.web3.eth.get_transaction_count( self.keeper_address, \"pending\" ),", "make function calls, False otherwise. 
\"\"\" if function in [\"harvest\",", "self.chain ) self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Harvest", "key = self.keeper_acl.functions.HARVESTER_ROLE().call() elif function == \"tend\": key = self.keeper_acl.functions.TENDER_ROLE().call()", "None, strategy_name: str = None, ): try: tx_hash = self.__send_tend_tx(strategy)", "self.logger.info(f\"current rewards price per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address)", "estimate_harvest_amount(self, strategy: contract) -> Decimal: want = self.web3.eth.contract( address=strategy.functions.want().call(), abi=get_abi(self.chain,", "send_success_to_discord logging.basicConfig(level=logging.INFO) MAX_TIME_BETWEEN_HARVESTS = hours(120) HARVEST_THRESHOLD = 0.0005 # min", "a tx_hash of 0x00. Returns: HexBytes: Transaction hash for transaction", "out how to handle profit estimation # current_price_eth = self.get_current_rewards_price()", "self.keeper_acl.functions.hasRole(key, self.keeper_address).call() def __process_tend( self, strategy: contract = None, strategy_name:", "the time between harvests for the supplied strategy and returns", "else: send_error_to_discord( strategy_name, \"Harvest\", tx_hash=tx_hash, message=msg, chain=self.chain, keeper_address=self.keeper_address, ) except", "-> Decimal: return Decimal( self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas( {\"from\": self.keeper_address} ) ) def", "self.keeper_address}) return Decimal(estimated_gas_to_harvest) def __estimate_tend_gas(self, strategy_address: str) -> Decimal: return", "that was sent. 
\"\"\" max_target_block = None tx_hash = HexBytes(0)", "): try: tx_hash = self.__send_tend_tx(strategy) succeeded, _ = confirm_transaction(self.web3, tx_hash)", "\"harvest\" ) -> Decimal: current_gas_price = self.__get_effective_gas_price() if function ==", "update last harvest harvest # time to make sure we", "strategy: contract.Contract, ): \"\"\"Orchestration function that harvests outstanding rewards. Args:", "if returns: return self.keeper_acl.functions.harvest(strategy_address).buildTransaction( options ) else: return self.keeper_acl.functions.harvestNoReturn( strategy_address", "Network.Polygon: response = requests.get(\"https://gasstation-mainnet.matic.network\").json() gas_price = self.web3.toWei(int(response.get(\"fast\") * 1.1), \"gwei\")", "abi=get_abi(self.chain, \"oracle\"), ) # Times of last harvest if self.chain", "tx_hash != HexBytes(0): send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, )", "last harvest is > harvest_interval_threshold, else False \"\"\" # Only", "tx_hash of 0x00. Returns: HexBytes: Transaction hash for transaction that", "isn't whitelisted, throw an error and alert user. 
\"\"\" strategy_name", "NUM_FLASHBOTS_BUNDLES = 6 class GeneralHarvester(IHarvester): def __init__( self, chain: Network", "self.logger.error(f\"Error processing harvestMta tx: {e}\") send_error_to_discord( \"\", \"Harvest MTA\", error=e,", "tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest\", chain=self.chain, keeper_address=self.keeper_address )", "dict: if returns: return self.keeper_acl.functions.harvest(strategy_address).buildTransaction( options ) else: return self.keeper_acl.functions.harvestNoReturn(", "self.__get_effective_gas_price() if function == \"harvest\": self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_gas(address,", "tx: {e}\") send_error_to_discord( strategy_name, \"Harvest\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def", "dict ) -> dict: return self.keeper_acl.functions.harvestMta( voter_proxy_address ).buildTransaction(options) def estimate_gas_fee(", ") succeeded, msg = confirm_transaction( self.web3, tx_hash, max_block=max_target_block ) if", "self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def", "whitelisted for {strategy_name}\") want_address = strategy.functions.want().call() want = self.web3.eth.contract( address=want_address,", "cost: {gas_fee}\") self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_mta( self, voter_proxy:", "self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError", "def __estimate_harvest_gas(self, strategy_address: str, returns: bool) -> Decimal: if returns:", "f\"estimated gas fee: {self.__estimate_harvest_mta_gas(address)}\" ) return self.__build_harvest_mta_transaction(address, options) def __build_harvest_transaction(", "-> Decimal: return Decimal( 
self.keeper_acl.functions.tend(strategy_address).estimateGas( {\"from\": self.keeper_address} ) ) def", "bool: # TODO: Implement this # harvest if ideal want", "Decimal: return Decimal( self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas( {\"from\": self.keeper_address} ) ) def __get_effective_gas_price(self)", "than the other EVM chains. Args: contract (contract): contract to", "def __init__( self, chain: Network = Network.Ethereum, web3: Web3 =", "for now we'll just harvest every hour should_harvest = self.is_profitable()", "gas costs on eth if self.chain not in [Network.Ethereum, Network.Fantom]:", "from web3.contract import Contract from config.constants import BASE_CURRENCIES from config.constants", "harvest: {(current_time - last_harvest) / 3600}\" ) return current_time -", "self.estimate_gas_fee(strategy.address, returns=False) self.logger.info(f\"estimated gas cost: {gas_fee}\") # for now we'll", "options: dict ) -> dict: return self.keeper_acl.functions.harvestMta( voter_proxy_address ).buildTransaction(options) def", "harvest( self, strategy: contract.Contract, ): \"\"\"Orchestration function that harvests outstanding", "estimated_gas = self.__estimate_tend_gas(address) elif function == \"harvestMta\": estimated_gas = self.__estimate_harvest_mta_gas(address)", "def __send_harvest_tx(self, strategy: contract, returns: bool = True) -> HexBytes:", "= self.__build_transaction(voter_proxy.address, function=\"harvestMta\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash", "should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_rewards_manager( self, strategy: contract,", "def is_time_to_harvest( self, strategy: contract.Contract, harvest_interval_threshold: int = MAX_TIME_BETWEEN_HARVESTS, )", "# If successful, update last harvest harvest # time to", "def __is_keeper_whitelisted(self, function: str) -> bool: \"\"\"Checks if the bot", "finally: return 
tx_hash, max_target_block def __send_tend_tx(self, strategy: contract) -> HexBytes:", "e: self.logger.error(f\"Error in sending harvestMta tx: {e}\") tx_hash = get_hash_from_failed_tx_error(", "if not self.__is_keeper_whitelisted(\"harvestNoReturn\"): raise ValueError( \"Keeper ACL is not whitelisted", "optional): Defaults to None. strategy_name (str, optional): Defaults to None.", "tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Tend\", chain=self.chain, keeper_address=self.keeper_address )", "def __build_tend_transaction(self, strategy_address: str, options: dict) -> dict: return self.keeper_acl.functions.tend(strategy_address).buildTransaction(", "web3 self.keeper_key = keeper_key self.keeper_address = keeper_address self.keeper_acl: Contract =", "self.keeper_address} ) # call badger api to get prices currency", "should_harvest = want_to_harvest / vault_balance >= HARVEST_THRESHOLD return True def", "self.__build_tend_transaction(address, options) elif function == \"harvestMta\": self.logger.info( f\"estimated gas fee:", "\"harvestMta\"]: key = self.keeper_acl.functions.HARVESTER_ROLE().call() elif function == \"tend\": key =", "an issue sending transaction (unable to communicate with node, etc.)", "function in [\"harvest\", \"harvestMta\"]: key = self.keeper_acl.functions.HARVESTER_ROLE().call() elif function ==", "from src.token_utils import get_token_price from src.discord_utils import send_error_to_discord from src.discord_utils", "send_error_to_discord( strategy_name, \"Harvest\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest_mta( self,", "return tx_hash def __send_harvest_mta_tx(self, voter_proxy: contract) -> HexBytes: \"\"\"Sends transaction", "options) elif function == \"harvestMta\": self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_mta_gas(address)}\"", "tx_type=\"Harvest MTA\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except Exception as e:", "= 
self.keeper_acl.functions.harvest(strategy.address).call( {\"from\": self.keeper_address} ) # call badger api to", "returns: bool) -> Decimal: if returns: estimated_gas_to_harvest = self.keeper_acl.functions.harvest( strategy_address", "True def harvest( self, strategy: contract.Contract, ): \"\"\"Orchestration function that", "{e}\") send_error_to_discord( strategy_name, \"Tend\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest(", "not have harvested within. Defaults to MAX_TIME_BETWEEN_HARVESTS. Returns: bool: True", "self.web3, self.base_usd_oracle, tx_hash, self.chain ) self.logger.info(f\"got gas price of tx:", "issue sending transaction (unable to communicate with node, etc.) we", "returns: bool, options: dict ) -> dict: if returns: return", "since last harvest is > harvest_interval_threshold, else False \"\"\" #", ") def __get_effective_gas_price(self) -> int: if self.chain == Network.Polygon: response", "from src.web3_utils import confirm_transaction from src.utils import get_abi from src.discord_utils", "price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain,", "= None, harvested: Decimal = None, returns: bool = True,", "None, ): self.logger = logging.getLogger(__name__) self.chain = chain self.web3 =", "gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_harvest( strategy=strategy, strategy_name=strategy_name,", "{gas_fee}\") self.__process_tend( strategy=strategy, strategy_name=strategy_name, ) def tend_then_harvest(self, strategy: contract): self.tend(strategy)", "Only care about harvest gas costs on eth if self.chain", "self.web3.eth.contract( address=strategy.functions.want().call(), abi=get_abi(self.chain, \"erc20\"), ) want_gained = self.keeper_acl.functions.harvest(strategy.address).call( {\"from\": self.keeper_address}", "= signed_tx.hash if not 
self.use_flashbots: self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) else: bundle = [", "__build_tend_transaction(self, strategy_address: str, options: dict) -> dict: return self.keeper_acl.functions.tend(strategy_address).buildTransaction( options", "processing tend tx: {e}\") send_error_to_discord( strategy_name, \"Tend\", error=e, chain=self.chain, keeper_address=self.keeper_address,", "Args: voter_proxy (contract): Mstable voter proxy contract \"\"\" try: tx_hash", "self.chain in [Network.Arbitrum, Network.Fantom]: gas_price = int(1.1 * self.web3.eth.gas_price) #", "prices currency = BASE_CURRENCIES[self.chain] if self.chain == Network.Fantom: price_per_want =", "successful, update last harvest harvest time self.update_last_harvest_time(voter_proxy.address) gas_price_of_tx = get_gas_price_of_tx(", "\"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") gas_fee =", "self.__build_transaction(strategy.address, returns=returns) signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash =", "src.web3_utils import confirm_transaction from src.utils import get_abi from src.discord_utils import", "(contract) Raises: ValueError: If the keeper isn't whitelisted, throw an", "{should_harvest}\") if should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_rewards_manager( self,", "ValueError(\"Keeper ACL is not whitelisted for calling harvest\") want_address =", "keeper_address: str = os.getenv(\"KEEPER_ADDRESS\"), keeper_key: str = os.getenv(\"KEEPER_KEY\"), base_oracle_address: str", "= block_number + NUM_FLASHBOTS_BUNDLES self.logger.info(f\"Bundle broadcasted at {max_target_block}\") except ValueError", "should_harvest_mta: self.__process_harvest_mta(voter_proxy) def tend(self, strategy: contract): strategy_name = strategy.functions.getName().call() #", "= signed_tx.hash 
self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError as e: self.logger.error(f\"Error in sending", ") def __process_harvest_mta( self, voter_proxy: contract, ): \"\"\"Private function to", "strategy_name = strategy.functions.getName().call() self.keeper_acl = self.web3.eth.contract( address=self.web3.toChecksumAddress( MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"] ), abi=get_abi(self.chain,", "gas cost: {gas_fee}\") self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_mta( self,", "If successful, update last harvest harvest time self.update_last_harvest_time(voter_proxy.address) gas_price_of_tx =", ") max_target_block = block_number + NUM_FLASHBOTS_BUNDLES self.logger.info(f\"Bundle broadcasted at {max_target_block}\")", "(contract) Raises: Exception: If we have an issue sending transaction", "tx = self.__build_transaction(strategy.address, function=\"tend\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key )", "str) -> bool: \"\"\"Checks if the bot we're using is", "get_hash_from_failed_tx_error( e, \"Harvest MTA\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash", "self, strategy: contract, ): strategy_name = strategy.functions.getName().call() # TODO: update", "\"\"\"Calculates the time between harvests for the supplied strategy and", "from config.constants import GAS_LIMITS from config.constants import MULTICHAIN_CONFIG from config.enums", "is > harvest_interval_threshold, else False \"\"\" # Only care about", "options) def __build_harvest_transaction( self, strategy_address: str, returns: bool, options: dict", ") self.logger.info(f\"estimated want change: {want_to_harvest}\") # TODO: figure out how", "tx_hash) if succeeded: # If successful, update last harvest harvest", "if not self.__is_keeper_whitelisted(\"harvest\"): raise ValueError(\"Keeper ACL is not whitelisted for", "self, voter_proxy_address: str, options: dict ) -> dict: 
return self.keeper_acl.functions.harvestMta(", "else: price_per_want = get_token_price(want.address, currency, self.chain) self.logger.info(f\"price per want: {price_per_want}", "self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_mta_gas(address)}\" ) return self.__build_harvest_mta_transaction(address, options) def", "= strategy.functions.getName().call() # TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvestNoReturn\"):", "harvest is > harvest_interval_threshold, else False \"\"\" # Only care", "url=self.discord_url, ) else: send_error_to_discord( strategy_name, \"Harvest\", tx_hash=tx_hash, message=msg, chain=self.chain, keeper_address=self.keeper_address,", "last_harvest) / 3600}\" ) return current_time - last_harvest > harvest_interval_threshold", "e, \"Harvest MTA\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash def", "ValueError(\"Keeper ACL is not whitelisted for calling harvestMta\") gas_fee =", "harvestMta\") gas_fee = self.estimate_gas_fee(voter_proxy.address, function=\"harvestMta\") self.logger.info(f\"estimated gas cost: {gas_fee}\") should_harvest_mta", "if self.chain == Network.Ethereum: options[\"maxPriorityFeePerGas\"] = get_priority_fee(self.web3) options[\"maxFeePerGas\"] = self.__get_effective_gas_price()", "] block_number = self.web3.eth.block_number for i in range(1, NUM_FLASHBOTS_BUNDLES +", "= \"harvest\" ) -> Decimal: current_gas_price = self.__get_effective_gas_price() if function", "in [Network.Ethereum, Network.Fantom]: self.last_harvest_times = get_last_harvest_times( self.web3, self.keeper_acl, start_block=self.web3.eth.block_number -", "False otherwise. \"\"\" if function in [\"harvest\", \"harvestMta\"]: key =", "and alert user. \"\"\" strategy_name = strategy.functions.getName().call() # TODO: update", "whitelisted for the strategy. 
Returns: bool: True if our bot", "e: self.logger.error(f\"Error processing harvestMta tx: {e}\") send_error_to_discord( \"\", \"Harvest MTA\",", "tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, )", "str = \"harvest\" ) -> Decimal: current_gas_price = self.__get_effective_gas_price() if", "= os.getenv(\"KEEPER_KEY\"), base_oracle_address: str = os.getenv(\"ETH_USD_CHAINLINK\"), use_flashbots: bool = False,", "succeeded: # If successful, update last harvest harvest time self.update_last_harvest_time(voter_proxy.address)", "HexBytes: Transaction hash for transaction that was sent. \"\"\" tx_hash", "{(current_time - last_harvest) / 3600}\" ) return current_time - last_harvest", "# Times of last harvest if self.chain in [Network.Ethereum, Network.Fantom]:", "= self.keeper_acl.functions.TENDER_ROLE().call() elif function == \"rewards_manager\": key = self.keeper_acl.functions.KEEPER_ROLE().call() return", "abi=get_abi(self.chain, \"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") want_to_harvest", "self.keeper_address, \"gas\": GAS_LIMITS[self.chain], } if self.chain == Network.Ethereum: options[\"maxPriorityFeePerGas\"] =", "tx_hash, self.chain ) self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\") send_success_to_discord(", "true if it has been longer than the supplied harvest_interval_threshold", "strategy_name=strategy_name, ) def harvest_rewards_manager( self, strategy: contract, ): strategy_name =", "contract object harvest_interval_threshold (int, optional): Amount of time in seconds", "price per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, returns=False) self.logger.info(f\"estimated", "harvests for the supplied strategy and returns true if it", ") self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\") 
send_success_to_discord( tx_type=\"Harvest MTA\",", "= self.__send_tend_tx(strategy) succeeded, _ = confirm_transaction(self.web3, tx_hash) if succeeded: gas_price_of_tx", "is not whitelisted for calling tend\") # TODO: figure out", "return gas_price def update_last_harvest_time(self, strategy_address: str): self.last_harvest_times[strategy_address] = self.web3.eth.get_block(\"latest\")[ \"timestamp\"", "\"tend\": estimated_gas = self.__estimate_tend_gas(address) elif function == \"harvestMta\": estimated_gas =", "= ( self.estimate_harvest_amount(strategy) / 10 ** want.functions.decimals().call() ) self.logger.info(f\"estimated want", "strategy_address: str) -> Decimal: return Decimal( self.keeper_acl.functions.tend(strategy_address).estimateGas( {\"from\": self.keeper_address} )", "from time import sleep import requests from hexbytes import HexBytes", "import contract from web3.contract import Contract from config.constants import BASE_CURRENCIES", ").buildTransaction(options) def __build_tend_transaction(self, strategy_address: str, options: dict) -> dict: return", "self.last_harvest_times = get_last_harvest_times( self.web3, self.keeper_acl, start_block=self.web3.eth.block_number - seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS), chain=self.chain, )", "return tx_hash def __build_transaction( self, address: str, returns: bool =", "HARVEST_THRESHOLD = 0.0005 # min ratio of want to total", "/ 10 ** want.functions.decimals().call() ) self.logger.info(f\"estimated want change: {want_to_harvest}\") #", "price per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas", "= self.keeper_acl.functions.HARVESTER_ROLE().call() elif function == \"tend\": key = self.keeper_acl.functions.TENDER_ROLE().call() elif", "send_error_to_discord from src.discord_utils import send_success_to_discord logging.basicConfig(level=logging.INFO) MAX_TIME_BETWEEN_HARVESTS = hours(120) HARVEST_THRESHOLD", "harvest\") want_address = 
strategy.functions.want().call() want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain, \"erc20\"),", "tx_hash = signed_tx.hash if not self.use_flashbots: self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) else: bundle =", "strategy=strategy, strategy_name=strategy_name, ) def tend_then_harvest(self, strategy: contract): self.tend(strategy) sleep(60) self.harvest(strategy)", "not whitelisted for calling harvestMta\") gas_fee = self.estimate_gas_fee(voter_proxy.address, function=\"harvestMta\") self.logger.info(f\"estimated", "harvest every hour should_harvest = self.is_profitable() self.logger.info(f\"Should we harvest: {should_harvest}\")", "chain: Network = Network.Ethereum, web3: Web3 = None, keeper_acl: str", "for ACL if not self.__is_keeper_whitelisted(\"harvest\"): raise ValueError(\"Keeper ACL is not", "total vault AUM required to harvest NUM_FLASHBOTS_BUNDLES = 6 class", "= {} self.use_flashbots = use_flashbots self.discord_url = discord_url def is_time_to_harvest(", "== \"harvest\": estimated_gas = self.__estimate_harvest_gas(address, returns) elif function == \"tend\":", "if function == \"harvest\": estimated_gas = self.__estimate_harvest_gas(address, returns) elif function", "contract (contract): contract to use to build harvest tx Returns:", "send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except Exception as", "def __estimate_tend_gas(self, strategy_address: str) -> Decimal: return Decimal( self.keeper_acl.functions.tend(strategy_address).estimateGas( {\"from\":", "have harvested within. Defaults to MAX_TIME_BETWEEN_HARVESTS. 
Returns: bool: True if", "strategy: contract, returns: bool = True) -> HexBytes: \"\"\"Sends transaction", "dict) -> dict: return self.keeper_acl.functions.tend(strategy_address).buildTransaction( options ) def __build_harvest_mta_transaction( self,", "get_hash_from_failed_tx_error from src.web3_utils import get_last_harvest_times from src.token_utils import get_token_price from", "keeper_address=self.keeper_address, ) def __process_harvest( self, strategy: contract = None, strategy_name:", "successful, update last harvest harvest # time to make sure", "config.constants import BASE_CURRENCIES from config.constants import GAS_LIMITS from config.constants import", "True) -> HexBytes: \"\"\"Sends transaction to ETH node for confirmation.", "keeper_acl: str = os.getenv(\"KEEPER_ACL\"), keeper_address: str = os.getenv(\"KEEPER_ADDRESS\"), keeper_key: str", "call harvestMta: {should_harvest_mta}\") if should_harvest_mta: self.__process_harvest_mta(voter_proxy) def tend(self, strategy: contract):", "Discord for monitoring Args: voter_proxy (contract): Mstable voter proxy contract", "raise ValueError(\"Keeper ACL is not whitelisted for calling harvestMta\") gas_fee", "+ i ) max_target_block = block_number + NUM_FLASHBOTS_BUNDLES self.logger.info(f\"Bundle broadcasted", "{strategy_name}\") want_address = strategy.functions.want().call() want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain, \"erc20\"),", "function=\"harvestMta\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash", "e: self.logger.error(f\"Error in sending harvest tx: {e}\") tx_hash = get_hash_from_failed_tx_error(", "elif self.chain == Network.Ethereum: # EIP-1559 gas_price = get_effective_gas_price(self.web3) return", "harvest NUM_FLASHBOTS_BUNDLES = 6 class GeneralHarvester(IHarvester): def __init__( self, chain:", "tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) else: 
send_error_to_discord( strategy_name, \"Harvest\",", "self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash,", "self.keeper_key = keeper_key self.keeper_address = keeper_address self.keeper_acl: Contract = self.web3.eth.contract(", "of total vault assets # should_harvest = want_to_harvest / vault_balance", "chains. Args: contract (contract): contract to use to build harvest", "-> Decimal: want = self.web3.eth.contract( address=strategy.functions.want().call(), abi=get_abi(self.chain, \"erc20\"), ) want_gained", "harvests outstanding rewards. Args: strategy (contract) Raises: ValueError: If the", "{self.__estimate_tend_gas(address)}\") return self.__build_tend_transaction(address, options) elif function == \"harvestMta\": self.logger.info( f\"estimated", "e, \"Harvest\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash, max_target_block def", "hours from src.misc_utils import seconds_to_blocks from src.tx_utils import get_effective_gas_price from", "def __process_harvest( self, strategy: contract = None, strategy_name: str =", "ValueError( \"Keeper ACL is not whitelisted for calling harvestNoReturn\" )", "if self.chain == Network.Polygon: response = requests.get(\"https://gasstation-mainnet.matic.network\").json() gas_price = self.web3.toWei(int(response.get(\"fast\")", ") self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Tend {strategy_name}\",", "strategy_name (str, optional): Defaults to None. 
harvested (Decimal, optional): Amount", "use_flashbots self.discord_url = discord_url def is_time_to_harvest( self, strategy: contract.Contract, harvest_interval_threshold:", "self.__get_effective_gas_price() else: options[\"gasPrice\"] = self.__get_effective_gas_price() if function == \"harvest\": self.logger.info(", "chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=\"Harvest MTA\",", "> 0.05% of total vault assets # should_harvest = want_to_harvest", "strategy: contract) -> Decimal: want = self.web3.eth.contract( address=strategy.functions.want().call(), abi=get_abi(self.chain, \"erc20\"),", "= strategy.functions.getName().call() # TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvest\"):", "strategy_address ).estimateGas({\"from\": self.keeper_address}) else: estimated_gas_to_harvest = self.keeper_acl.functions.harvestNoReturn( strategy_address ).estimateGas({\"from\": self.keeper_address})", "returns: bool = True, function: str = \"harvest\" ) ->", "cost: {gas_fee}\") # for now we'll just harvest every hour", "ACL if not self.__is_keeper_whitelisted(\"harvestMta\"): raise ValueError(\"Keeper ACL is not whitelisted", "succeeded: # If successful, update last harvest harvest # time", "self, address: str, returns: bool = True, function: str =", "options[\"gasPrice\"] = self.__get_effective_gas_price() if function == \"harvest\": self.logger.info( f\"estimated gas", "send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash", "Mstable voter proxy contract \"\"\" try: tx_hash = self.__send_harvest_mta_tx(voter_proxy) succeeded,", "start_block=self.web3.eth.block_number - seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS), chain=self.chain, ) else: # Don't care about", "NUM_FLASHBOTS_BUNDLES + 1): self.web3.flashbots.send_bundle( bundle, target_block_number=block_number + i ) max_target_block", "self.keeper_address} ) 
) def __estimate_harvest_mta_gas(self, voter_proxy_address: str) -> Decimal: return", "if not self.use_flashbots: # And if pending self.update_last_harvest_time(strategy.address) send_success_to_discord( tx_type=f\"Harvest", "{e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest MTA\", chain=self.chain, keeper_address=self.keeper_address )", "elif function == \"harvestMta\": estimated_gas = self.__estimate_harvest_mta_gas(address) return Decimal(current_gas_price *", "{e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest\", chain=self.chain, keeper_address=self.keeper_address ) finally:", ") self.base_usd_oracle: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(base_oracle_address), abi=get_abi(self.chain, \"oracle\"), ) #", "harvested within. Defaults to MAX_TIME_BETWEEN_HARVESTS. Returns: bool: True if time", "\"erc20\"), ) want_gained = self.keeper_acl.functions.harvest(strategy.address).call( {\"from\": self.keeper_address} ) # call", "dict: \"\"\"Builds transaction depending on which chain we're harvesting. EIP-1559", "tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except Exception as e:", "communicate with node, etc.) we log the error and return", "user. 
\"\"\" strategy_name = strategy.functions.getName().call() # TODO: update for ACL", "return self.__build_harvest_transaction(address, returns, options) elif function == \"tend\": self.logger.info(f\"estimated gas", "of time in seconds that is acceptable to not have", ") def __estimate_harvest_mta_gas(self, voter_proxy_address: str) -> Decimal: return Decimal( self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas(", "# TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvestMta\"): raise ValueError(\"Keeper", "seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS), chain=self.chain, ) else: # Don't care about poly/arbitrum self.last_harvest_times", "(ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, function=\"tend\") self.logger.info(f\"estimated gas cost: {gas_fee}\")", ") tx_hash = signed_tx.hash if not self.use_flashbots: self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) else: bundle", "ValueError(f\"Keeper is not whitelisted for {strategy_name}\") want_address = strategy.functions.want().call() want", "= True, function: str = \"harvest\" ) -> Decimal: current_gas_price", "except Exception as e: self.logger.error(f\"Error processing tend tx: {e}\") send_error_to_discord(", "= self.__get_effective_gas_price() else: options[\"gasPrice\"] = self.__get_effective_gas_price() if function == \"harvest\":", "estimated_gas_to_harvest = self.keeper_acl.functions.harvestNoReturn( strategy_address ).estimateGas({\"from\": self.keeper_address}) return Decimal(estimated_gas_to_harvest) def __estimate_tend_gas(self,", "per want: {price_per_want} {currency}\") self.logger.info(f\"want gained: {want_gained}\") if type(want_gained) is", "if function in [\"harvest\", \"harvestMta\"]: key = self.keeper_acl.functions.HARVESTER_ROLE().call() elif function", "for ETH txs than the other EVM chains. 
Args: contract", "strategy.functions.getName().call() self.keeper_acl = self.web3.eth.contract( address=self.web3.toChecksumAddress( MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"] ), abi=get_abi(self.chain, \"rewards_manager\"), )", "-> dict: \"\"\"Builds transaction depending on which chain we're harvesting.", "now we'll just harvest every hour should_harvest = self.is_profitable() self.logger.info(f\"Should", "last harvest harvest # time to make sure we don't", "strategy_address: str, returns: bool) -> Decimal: if returns: estimated_gas_to_harvest =", "): # TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvestMta\"): raise", "since last harvest: {(current_time - last_harvest) / 3600}\" ) return", "returns true if it has been longer than the supplied", "rewards price per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated", "function == \"tend\": estimated_gas = self.__estimate_tend_gas(address) elif function == \"harvestMta\":", "price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain,", "care about harvest gas costs on eth if self.chain not", "the supplied harvest_interval_threshold which is measured in seconds Args: strategy", "Network.Fantom]: return True try: last_harvest = self.last_harvest_times[strategy.address] current_time = self.web3.eth.get_block(\"latest\")[\"timestamp\"]", "not self.__is_keeper_whitelisted(\"rewards_manager\"): raise ValueError(f\"Keeper is not whitelisted for {strategy_name}\") want_address", "to make function calls, False otherwise. 
\"\"\" if function in", "self.keeper_acl: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(keeper_acl), abi=get_abi(self.chain, \"keeper_acl\"), ) self.base_usd_oracle: Contract", "from src.tx_utils import get_gas_price_of_tx from src.tx_utils import get_priority_fee from src.web3_utils", "self.keeper_address = keeper_address self.keeper_acl: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(keeper_acl), abi=get_abi(self.chain, \"keeper_acl\"),", "tx_hash != HexBytes(0): send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, )", "tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest MTA\", chain=self.chain, keeper_address=self.keeper_address", ") # call badger api to get prices currency =", "EVM chains. Args: contract (contract): contract to use to build", "chain=self.chain, keeper_address=self.keeper_address, ) def __send_harvest_tx(self, strategy: contract, returns: bool =", "= keeper_key self.keeper_address = keeper_address self.keeper_acl: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(keeper_acl),", "raise ValueError(f\"Keeper is not whitelisted for {strategy_name}\") want_address = strategy.functions.want().call()", "= self.estimate_gas_fee(strategy.address, function=\"tend\") self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_tend( strategy=strategy, strategy_name=strategy_name,", "None, strategy_name: str = None, harvested: Decimal = None, returns:", "want.address, currency, self.chain, use_staging=True ) else: price_per_want = get_token_price(want.address, currency,", "self.chain ) self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=\"Harvest", "-> bool: \"\"\"Calculates the time between harvests for the supplied", "from config.enums import Network from src.harvester import IHarvester from src.misc_utils", "elif function == \"harvestMta\": self.logger.info( 
f\"estimated gas fee: {self.__estimate_harvest_mta_gas(address)}\" )", "Exception as e: self.logger.error(f\"Error processing tend tx: {e}\") send_error_to_discord( strategy_name,", "harvest_mta( self, voter_proxy: contract, ): # TODO: update for ACL", "etc.) we log the error and return a tx_hash of", "change: {want_to_harvest}\") # TODO: figure out how to handle profit", "self, strategy: contract = None, strategy_name: str = None, ):", "def __send_harvest_mta_tx(self, voter_proxy: contract) -> HexBytes: \"\"\"Sends transaction to ETH", "hour should_harvest = self.is_profitable() self.logger.info(f\"Should we harvest: {should_harvest}\") if should_harvest:", "the bot we're using is whitelisted for the strategy. Returns:", "succeeded, _ = confirm_transaction(self.web3, tx_hash) if succeeded: gas_price_of_tx = get_gas_price_of_tx(", "Network.Ethereum: options[\"maxPriorityFeePerGas\"] = get_priority_fee(self.web3) options[\"maxFeePerGas\"] = self.__get_effective_gas_price() else: options[\"gasPrice\"] =", "error and return a tx_hash of 0x00. 
Returns: HexBytes: Transaction", "if succeeded: # If successful, update last harvest harvest time", "6 class GeneralHarvester(IHarvester): def __init__( self, chain: Network = Network.Ethereum,", "for calling tend\") # TODO: figure out how to handle", "self.__process_harvest_mta(voter_proxy) def tend(self, strategy: contract): strategy_name = strategy.functions.getName().call() # TODO:", "MTA\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __send_harvest_tx(self, strategy: contract, returns:", "returns=returns ) succeeded, msg = confirm_transaction( self.web3, tx_hash, max_block=max_target_block )", "tx on eth and then send transaction to Discord for", "returns)}\" ) return self.__build_harvest_transaction(address, returns, options) elif function == \"tend\":", "Network.Fantom: price_per_want = get_token_price( want.address, currency, self.chain, use_staging=True ) else:", "\"harvestMta\": estimated_gas = self.__estimate_harvest_mta_gas(address) return Decimal(current_gas_price * estimated_gas) def __estimate_harvest_gas(self,", "bool = True) -> HexBytes: \"\"\"Sends transaction to ETH node", "try: tx = self.__build_transaction(strategy.address, function=\"tend\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key", "= os.getenv(\"KEEPER_ACL\"), keeper_address: str = os.getenv(\"KEEPER_ADDRESS\"), keeper_key: str = os.getenv(\"KEEPER_KEY\"),", "returns=False) self.logger.info(f\"estimated gas cost: {gas_fee}\") # for now we'll just", "function that harvests outstanding rewards. Args: strategy (contract) Raises: ValueError:", "{e}\") send_error_to_discord( \"\", \"Harvest MTA\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def", "sending transaction (unable to communicate with node, etc.) 
we log", "transaction to Discord for monitoring Args: voter_proxy (contract): Mstable voter", "url=self.discord_url, ) elif tx_hash != HexBytes(0): if not self.use_flashbots: #", "-> HexBytes: \"\"\"Sends transaction to ETH node for confirmation. Args:", "use_staging=True ) else: price_per_want = get_token_price(want.address, currency, self.chain) self.logger.info(f\"price per", "send_error_to_discord( strategy_name, \"Tend\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest( self,", "not whitelisted for calling harvestNoReturn\" ) want_address = strategy.functions.want().call() want", "pending self.update_last_harvest_time(strategy.address) send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) else:", "return self.keeper_acl.functions.harvestMta( voter_proxy_address ).buildTransaction(options) def estimate_gas_fee( self, address: str, returns:", "strategy_name=strategy_name, ) def harvest_no_return( self, strategy: contract, ): strategy_name =", "if time since last harvest is > harvest_interval_threshold, else False", "import Contract from config.constants import BASE_CURRENCIES from config.constants import GAS_LIMITS", ") tx_hash = signed_tx.hash self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError as e: self.logger.error(f\"Error", "self.logger.info(f\"want gained: {want_gained}\") if type(want_gained) is list: want_gained = 0", ") -> dict: return self.keeper_acl.functions.harvestMta( voter_proxy_address ).buildTransaction(options) def estimate_gas_fee( self,", "else: estimated_gas_to_harvest = self.keeper_acl.functions.harvestNoReturn( strategy_address ).estimateGas({\"from\": self.keeper_address}) return Decimal(estimated_gas_to_harvest) def", "self.__build_transaction(voter_proxy.address, function=\"harvestMta\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash =", "acceptable to not 
have harvested within. Defaults to MAX_TIME_BETWEEN_HARVESTS. Returns:", "-> bool: # TODO: Implement this # harvest if ideal", "contract, ): # TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvestMta\"):", "ValueError as e: self.logger.error(f\"Error in sending harvestMta tx: {e}\") tx_hash", "i in range(1, NUM_FLASHBOTS_BUNDLES + 1): self.web3.flashbots.send_bundle( bundle, target_block_number=block_number +", "for {strategy_name}\") want_address = strategy.functions.want().call() want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain,", "voter_proxy_address ).buildTransaction(options) def estimate_gas_fee( self, address: str, returns: bool =", "strategy: contract, ): strategy_name = strategy.functions.getName().call() self.keeper_acl = self.web3.eth.contract( address=self.web3.toChecksumAddress(", "= self.__get_effective_gas_price() if function == \"harvest\": self.logger.info( f\"estimated gas fee:", "send transaction to Discord for monitoring Args: voter_proxy (contract): Mstable", "keeper_address=self.keeper_address ) finally: return tx_hash def __build_transaction( self, address: str,", "int = MAX_TIME_BETWEEN_HARVESTS, ) -> bool: \"\"\"Calculates the time between", "if it has been longer than the supplied harvest_interval_threshold which", "== \"tend\": estimated_gas = self.__estimate_tend_gas(address) elif function == \"harvestMta\": estimated_gas", "contract, returns: bool = True) -> HexBytes: \"\"\"Sends transaction to", "# time to make sure we don't double harvest self.update_last_harvest_time(strategy.address)", "harvest harvest # time to make sure we don't double", "block_number = self.web3.eth.block_number for i in range(1, NUM_FLASHBOTS_BUNDLES + 1):", "str) -> Decimal: return Decimal( self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas( {\"from\": self.keeper_address} ) )", "import get_abi from src.discord_utils import get_hash_from_failed_tx_error from src.web3_utils import 
get_last_harvest_times", "self.__build_harvest_mta_transaction(address, options) def __build_harvest_transaction( self, strategy_address: str, returns: bool, options:", "confirm_transaction(self.web3, tx_hash) if succeeded: gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash,", "returns) elif function == \"tend\": estimated_gas = self.__estimate_tend_gas(address) elif function", "voter_proxy_address: str, options: dict ) -> dict: return self.keeper_acl.functions.harvestMta( voter_proxy_address", "make sure we don't double harvest self.update_last_harvest_time(strategy.address) gas_price_of_tx = get_gas_price_of_tx(", "costs on eth if self.chain not in [Network.Ethereum, Network.Fantom]: return", "error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __send_harvest_tx(self, strategy: contract, returns: bool", "chain we're harvesting. EIP-1559 requires different handling for ETH txs", "TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvestMta\"): raise ValueError(\"Keeper ACL", "proxy contract \"\"\" try: tx_hash = self.__send_harvest_mta_tx(voter_proxy) succeeded, _ =", "tx_hash = HexBytes(0) try: tx = self.__build_transaction(strategy.address, function=\"tend\") signed_tx =", "\"\"\"Orchestration function that harvests outstanding rewards. 
Args: strategy (contract) Raises:", "from hexbytes import HexBytes from web3 import Web3 from web3", "of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url,", "BASE_CURRENCIES[self.chain] if self.chain == Network.Fantom: price_per_want = get_token_price( want.address, currency,", "self.logger.info(f\"vault balance: {vault_balance}\") want_to_harvest = ( self.estimate_harvest_amount(strategy) / 10 **", "= hours(120) HARVEST_THRESHOLD = 0.0005 # min ratio of want", "tx_hash != HexBytes(0): if not self.use_flashbots: # And if pending", "web3 import Web3 from web3 import contract from web3.contract import", "= self.last_harvest_times[strategy.address] current_time = self.web3.eth.get_block(\"latest\")[\"timestamp\"] self.logger.info( f\"Time since last harvest:", "handling for ETH txs than the other EVM chains. Args:", "Estimated gas price + buffer elif self.chain == Network.Ethereum: #", "tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except Exception as e: self.logger.error(f\"Error processing", "= self.__estimate_tend_gas(address) elif function == \"harvestMta\": estimated_gas = self.__estimate_harvest_mta_gas(address) return", "if self.chain not in [Network.Ethereum, Network.Fantom]: return True try: last_harvest", "last harvest if self.chain in [Network.Ethereum, Network.Fantom]: self.last_harvest_times = get_last_harvest_times(", "Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(keeper_acl), abi=get_abi(self.chain, \"keeper_acl\"), ) self.base_usd_oracle: Contract =", "tend_then_harvest(self, strategy: contract): self.tend(strategy) sleep(60) self.harvest(strategy) def estimate_harvest_amount(self, strategy: contract)", "src.tx_utils import get_priority_fee from src.web3_utils import confirm_transaction from src.utils import", "src.token_utils import get_token_price from src.discord_utils import 
send_error_to_discord from src.discord_utils import", "Args: strategy (contract): Vault strategy web3 contract object harvest_interval_threshold (int,", "object harvest_interval_threshold (int, optional): Amount of time in seconds that", "HexBytes from web3 import Web3 from web3 import contract from", "whitelisted for calling harvest\") want_address = strategy.functions.want().call() want = self.web3.eth.contract(", "= HexBytes(0) try: tx = self.__build_transaction(strategy.address, function=\"tend\") signed_tx = self.web3.eth.account.sign_transaction(", "\"\"\" # Only care about harvest gas costs on eth", "strategy_name, \"Tend\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest( self, strategy:", "abi=get_abi(self.chain, \"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") #", "is whitelisted for the strategy. Returns: bool: True if our", "MTA\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except Exception as e: self.logger.error(f\"Error", "then send transaction to Discord for monitoring Args: strategy (contract,", ").estimateGas({\"from\": self.keeper_address}) else: estimated_gas_to_harvest = self.keeper_acl.functions.harvestNoReturn( strategy_address ).estimateGas({\"from\": self.keeper_address}) return", "def is_profitable(self) -> bool: # TODO: Implement this # harvest", "gas_fee = self.estimate_gas_fee(strategy.address, returns=False) self.logger.info(f\"estimated gas cost: {gas_fee}\") # for", "to ETH node for confirmation. 
Args: strategy (contract) Raises: Exception:", "from src.discord_utils import send_error_to_discord from src.discord_utils import send_success_to_discord logging.basicConfig(level=logging.INFO) MAX_TIME_BETWEEN_HARVESTS", "time import sleep import requests from hexbytes import HexBytes from", "contract) -> HexBytes: \"\"\"Sends transaction to ETH node for confirmation.", "want change: {want_to_harvest}\") # TODO: figure out how to handle", "bool = True, function: str = \"harvest\" ) -> dict:", "how to handle profit estimation # current_price_eth = self.get_current_rewards_price() #", "self.keeper_acl.functions.harvest(strategy.address).call( {\"from\": self.keeper_address} ) # call badger api to get", "returns: bool = True, ): \"\"\"Private function to create, broadcast,", "if function == \"harvest\": self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_gas(address, returns)}\"", "private_key=self.keeper_key ) tx_hash = signed_tx.hash if not self.use_flashbots: self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) else:", "import BASE_CURRENCIES from config.constants import GAS_LIMITS from config.constants import MULTICHAIN_CONFIG", "contract = None, strategy_name: str = None, ): try: tx_hash", "contract = None, strategy_name: str = None, harvested: Decimal =", "vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated", "(contract): Mstable voter proxy contract \"\"\" try: tx_hash = self.__send_harvest_mta_tx(voter_proxy)", "assets # should_harvest = want_to_harvest / vault_balance >= HARVEST_THRESHOLD return", ") def harvest_mta( self, voter_proxy: contract, ): # TODO: update", "price per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, function=\"tend\") self.logger.info(f\"estimated", "__process_harvest_mta( self, voter_proxy: contract, ): \"\"\"Private function to 
create, broadcast,", "price_per_want * want_gained def is_profitable(self) -> bool: # TODO: Implement", "{gas_fee}\") # for now we'll just harvest every hour should_harvest", "Don't care about poly/arbitrum self.last_harvest_times = {} self.use_flashbots = use_flashbots", "): \"\"\"Private function to create, broadcast, confirm tx on eth", "int(1.1 * self.web3.eth.gas_price) # Estimated gas price + buffer elif", "def harvest( self, strategy: contract.Contract, ): \"\"\"Orchestration function that harvests", "calling harvestNoReturn\" ) want_address = strategy.functions.want().call() want = self.web3.eth.contract( address=want_address,", "f\"estimated gas fee: {self.__estimate_harvest_gas(address, returns)}\" ) return self.__build_harvest_transaction(address, returns, options)", "self.__send_harvest_mta_tx(voter_proxy) succeeded, _ = confirm_transaction(self.web3, tx_hash) if succeeded: # If", "elif function == \"tend\": self.logger.info(f\"estimated gas fee: {self.__estimate_tend_gas(address)}\") return self.__build_tend_transaction(address,", "self.web3.eth.get_block(\"latest\")[\"timestamp\"] self.logger.info( f\"Time since last harvest: {(current_time - last_harvest) /", "strategy and returns true if it has been longer than", "!= HexBytes(0): send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except", "self, chain: Network = Network.Ethereum, web3: Web3 = None, keeper_acl:", "tend\") # TODO: figure out how to handle profit estimation", "harvesting. 
EIP-1559 requires different handling for ETH txs than the", "time between harvests for the supplied strategy and returns true", "to get prices currency = BASE_CURRENCIES[self.chain] if self.chain == Network.Fantom:", "chain=self.chain, url=self.discord_url, ) except Exception as e: self.logger.error(f\"Error processing harvestMta", "update for ACL if not self.__is_keeper_whitelisted(\"harvestMta\"): raise ValueError(\"Keeper ACL is", "sending harvest tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest\", chain=self.chain,", "= web3 self.keeper_key = keeper_key self.keeper_address = keeper_address self.keeper_acl: Contract", "strategy.functions.want().call() want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain, \"erc20\"), ) vault_balance =", ") -> dict: \"\"\"Builds transaction depending on which chain we're", "want_gained = self.keeper_acl.functions.harvest(strategy.address).call( {\"from\": self.keeper_address} ) # call badger api", "harvested: Decimal = None, returns: bool = True, ): \"\"\"Private", "update for ACL if not self.__is_keeper_whitelisted(\"harvest\"): raise ValueError(\"Keeper ACL is", "is list: want_gained = 0 return price_per_want * want_gained def", "in [\"harvest\", \"harvestMta\"]: key = self.keeper_acl.functions.HARVESTER_ROLE().call() elif function == \"tend\":", "get_abi from src.discord_utils import get_hash_from_failed_tx_error from src.web3_utils import get_last_harvest_times from", "strategy_name: str = None, ): try: tx_hash = self.__send_tend_tx(strategy) succeeded,", "current_time - last_harvest > harvest_interval_threshold except KeyError: return True def", "* estimated_gas) def __estimate_harvest_gas(self, strategy_address: str, returns: bool) -> Decimal:", "throw an error and alert user. 
\"\"\" strategy_name = strategy.functions.getName().call()", "{\"from\": self.keeper_address} ) ) def __estimate_harvest_mta_gas(self, voter_proxy_address: str) -> Decimal:", "balance: {vault_balance}\") want_to_harvest = ( self.estimate_harvest_amount(strategy) / 10 ** want.functions.decimals().call()", "BASE_CURRENCIES from config.constants import GAS_LIMITS from config.constants import MULTICHAIN_CONFIG from", "private_key=self.keeper_key ) tx_hash = signed_tx.hash self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError as e:", "\"Harvest\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest_mta( self, voter_proxy: contract,", "self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_mta(", "from web3 import contract from web3.contract import Contract from config.constants", ") vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") want_to_harvest = (", "\"oracle\"), ) # Times of last harvest if self.chain in", ") ) def __estimate_harvest_mta_gas(self, voter_proxy_address: str) -> Decimal: return Decimal(", "harvest_rewards_manager( self, strategy: contract, ): strategy_name = strategy.functions.getName().call() self.keeper_acl =", "this # harvest if ideal want change is > 0.05%", ") else: price_per_want = get_token_price(want.address, currency, self.chain) self.logger.info(f\"price per want:", "self.web3.eth.contract( address=want_address, abi=get_abi(self.chain, \"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance:", "self.chain ) self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Tend", "bundle, target_block_number=block_number + i ) max_target_block = block_number + NUM_FLASHBOTS_BUNDLES", "for calling harvest\") want_address = 
strategy.functions.want().call() want = self.web3.eth.contract( address=want_address,", "!= HexBytes(0): send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except", ") if succeeded: # If successful, update last harvest harvest", "= True, function: str = \"harvest\" ) -> dict: \"\"\"Builds", "from src.misc_utils import seconds_to_blocks from src.tx_utils import get_effective_gas_price from src.tx_utils", "\"gwei\") elif self.chain in [Network.Arbitrum, Network.Fantom]: gas_price = int(1.1 *", "Network.Ethereum, web3: Web3 = None, keeper_acl: str = os.getenv(\"KEEPER_ACL\"), keeper_address:", "Web3 = None, keeper_acl: str = os.getenv(\"KEEPER_ACL\"), keeper_address: str =", "raise ValueError(\"Keeper ACL is not whitelisted for calling harvest\") want_address", "__estimate_harvest_mta_gas(self, voter_proxy_address: str) -> Decimal: return Decimal( self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas( {\"from\": self.keeper_address}", "-> int: if self.chain == Network.Polygon: response = requests.get(\"https://gasstation-mainnet.matic.network\").json() gas_price", "if succeeded: gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain )", "hexbytes import HexBytes from web3 import Web3 from web3 import", "Network.Fantom]: gas_price = int(1.1 * self.web3.eth.gas_price) # Estimated gas price", "function=\"harvestMta\") self.logger.info(f\"estimated gas cost: {gas_fee}\") should_harvest_mta = self.is_profitable() self.logger.info(f\"Should we", "returns, options) elif function == \"tend\": self.logger.info(f\"estimated gas fee: {self.__estimate_tend_gas(address)}\")", "if should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_rewards_manager( self, strategy:", "whitelisted for calling tend\") # TODO: figure out how to", "log the error and return a tx_hash of 0x00. 
Returns:", "from config.constants import BASE_CURRENCIES from config.constants import GAS_LIMITS from config.constants", "self.chain, use_staging=True ) else: price_per_want = get_token_price(want.address, currency, self.chain) self.logger.info(f\"price", "{vault_balance}\") # TODO: figure out how to handle profit estimation", "node, etc.) we log the error and return a tx_hash", "gas price + buffer elif self.chain == Network.Ethereum: # EIP-1559", "in [Network.Arbitrum, Network.Fantom]: gas_price = int(1.1 * self.web3.eth.gas_price) # Estimated", "last harvest: {(current_time - last_harvest) / 3600}\" ) return current_time", "True try: last_harvest = self.last_harvest_times[strategy.address] current_time = self.web3.eth.get_block(\"latest\")[\"timestamp\"] self.logger.info( f\"Time", "True, ): \"\"\"Private function to create, broadcast, confirm tx on", "fee: {self.__estimate_harvest_mta_gas(address)}\" ) return self.__build_harvest_mta_transaction(address, options) def __build_harvest_transaction( self, strategy_address:", "abi=get_abi(self.chain, \"keeper_acl\"), ) self.base_usd_oracle: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(base_oracle_address), abi=get_abi(self.chain, \"oracle\"),", "True if our bot is whitelisted to make function calls,", "on eth if self.chain not in [Network.Ethereum, Network.Fantom]: return True", "voter proxy contract \"\"\" try: tx_hash = self.__send_harvest_mta_tx(voter_proxy) succeeded, _", "= None, returns: bool = True, ): \"\"\"Private function to", "self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas( {\"from\": self.keeper_address} ) ) def __get_effective_gas_price(self) -> int: if", "chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash def __build_transaction( self, address:", "strategy: contract) -> HexBytes: \"\"\"Sends transaction to ETH node for", "- last_harvest) / 3600}\" ) return current_time - last_harvest >", "min ratio of want to total vault AUM 
required to", "send_error_to_discord( strategy_name, \"Harvest\", tx_hash=tx_hash, message=msg, chain=self.chain, keeper_address=self.keeper_address, ) except Exception", ") elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, chain=self.chain,", "chain=self.chain, ) else: # Don't care about poly/arbitrum self.last_harvest_times =", "self.logger.info(f\"estimated gas cost: {gas_fee}\") should_harvest_mta = self.is_profitable() self.logger.info(f\"Should we call", "self.keeper_acl.functions.HARVESTER_ROLE().call() elif function == \"tend\": key = self.keeper_acl.functions.TENDER_ROLE().call() elif function", "about poly/arbitrum self.last_harvest_times = {} self.use_flashbots = use_flashbots self.discord_url =", "str = os.getenv(\"ETH_USD_CHAINLINK\"), use_flashbots: bool = False, discord_url: str =", "for the supplied strategy and returns true if it has", "= strategy.functions.want().call() want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain, \"erc20\"), ) vault_balance", "monitoring Args: voter_proxy (contract): Mstable voter proxy contract \"\"\" try:", "current_gas_price = self.__get_effective_gas_price() if function == \"harvest\": estimated_gas = self.__estimate_harvest_gas(address,", "sure we don't double harvest self.update_last_harvest_time(strategy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3,", "in [Network.Ethereum, Network.Fantom]: return True try: last_harvest = self.last_harvest_times[strategy.address] current_time", ") -> dict: if returns: return self.keeper_acl.functions.harvest(strategy_address).buildTransaction( options ) else:", "gas fee: {self.__estimate_harvest_gas(address, returns)}\" ) return self.__build_harvest_transaction(address, returns, options) elif", "self.tend(strategy) sleep(60) self.harvest(strategy) def estimate_harvest_amount(self, strategy: contract) -> Decimal: want", "- seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS), chain=self.chain, ) else: # Don't 
care about poly/arbitrum", "Decimal = None, returns: bool = True, ): \"\"\"Private function", "harvest tx: {e}\") send_error_to_discord( strategy_name, \"Harvest\", error=e, chain=self.chain, keeper_address=self.keeper_address, )", "to handle profit estimation # current_price_eth = self.get_current_rewards_price() # self.logger.info(f\"current", "within. Defaults to MAX_TIME_BETWEEN_HARVESTS. Returns: bool: True if time since", "get_gas_price_of_tx from src.tx_utils import get_priority_fee from src.web3_utils import confirm_transaction from", "get_last_harvest_times from src.token_utils import get_token_price from src.discord_utils import send_error_to_discord from", ") except Exception as e: self.logger.error(f\"Error processing harvestMta tx: {e}\")", "0.05% of total vault assets # should_harvest = want_to_harvest /", "Network = Network.Ethereum, web3: Web3 = None, keeper_acl: str =", "Exception as e: self.logger.error(f\"Error processing harvestMta tx: {e}\") send_error_to_discord( \"\",", "else: return self.keeper_acl.functions.harvestNoReturn( strategy_address ).buildTransaction(options) def __build_tend_transaction(self, strategy_address: str, options:", "str = None, harvested: Decimal = None, returns: bool =", "# Estimated gas price + buffer elif self.chain == Network.Ethereum:", "\"\"\" strategy_name = strategy.functions.getName().call() # TODO: update for ACL if", "Amount of Sushi harvested. Defaults to None. 
\"\"\" try: tx_hash,", "self.logger.info(f\"vault balance: {vault_balance}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\")", ") vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") # TODO: figure", "return True def __is_keeper_whitelisted(self, function: str) -> bool: \"\"\"Checks if", "options = { \"nonce\": self.web3.eth.get_transaction_count( self.keeper_address, \"pending\" ), \"from\": self.keeper_address,", "price_per_want = get_token_price( want.address, currency, self.chain, use_staging=True ) else: price_per_want", "strategy_name, \"Harvest\", tx_hash=tx_hash, message=msg, chain=self.chain, keeper_address=self.keeper_address, ) except Exception as", "bundle = [ {\"signed_transaction\": signed_tx.rawTransaction}, ] block_number = self.web3.eth.block_number for", "str = os.getenv(\"KEEPER_ADDRESS\"), keeper_key: str = os.getenv(\"KEEPER_KEY\"), base_oracle_address: str =", "Network from src.harvester import IHarvester from src.misc_utils import hours from", "> harvest_interval_threshold, else False \"\"\" # Only care about harvest", "for confirmation. Args: voter_proxy (contract) Raises: Exception: If we have", "bool: True if our bot is whitelisted to make function", "{strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0):", "Defaults to MAX_TIME_BETWEEN_HARVESTS. 
Returns: bool: True if time since last", "sending harvestMta tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest MTA\",", "GeneralHarvester(IHarvester): def __init__( self, chain: Network = Network.Ethereum, web3: Web3", "we'll just harvest every hour should_harvest = self.is_profitable() self.logger.info(f\"Should we", "contract) -> Decimal: want = self.web3.eth.contract( address=strategy.functions.want().call(), abi=get_abi(self.chain, \"erc20\"), )", "Network.Ethereum: # EIP-1559 gas_price = get_effective_gas_price(self.web3) return gas_price def update_last_harvest_time(self,", "tx_hash = get_hash_from_failed_tx_error( e, \"Harvest\", chain=self.chain, keeper_address=self.keeper_address ) finally: return", "estimated_gas = self.__estimate_harvest_gas(address, returns) elif function == \"tend\": estimated_gas =", ") def harvest_rewards_manager( self, strategy: contract, ): strategy_name = strategy.functions.getName().call()", "gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=\"Harvest", "harvest_interval_threshold except KeyError: return True def harvest( self, strategy: contract.Contract,", "otherwise. 
\"\"\" if function in [\"harvest\", \"harvestMta\"]: key = self.keeper_acl.functions.HARVESTER_ROLE().call()", "), abi=get_abi(self.chain, \"rewards_manager\"), ) if not self.__is_keeper_whitelisted(\"rewards_manager\"): raise ValueError(f\"Keeper is", "self.chain == Network.Fantom: price_per_want = get_token_price( want.address, currency, self.chain, use_staging=True", "address=self.web3.toChecksumAddress(keeper_acl), abi=get_abi(self.chain, \"keeper_acl\"), ) self.base_usd_oracle: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(base_oracle_address), abi=get_abi(self.chain,", "our bot is whitelisted to make function calls, False otherwise.", "= self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash if not", "signed_tx.hash self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError as e: self.logger.error(f\"Error in sending tend", "in seconds Args: strategy (contract): Vault strategy web3 contract object", "eth if self.chain not in [Network.Ethereum, Network.Fantom]: return True try:", "= False, discord_url: str = None, ): self.logger = logging.getLogger(__name__)", "self.is_profitable() self.logger.info(f\"Should we call harvestMta: {should_harvest_mta}\") if should_harvest_mta: self.__process_harvest_mta(voter_proxy) def", "to total vault AUM required to harvest NUM_FLASHBOTS_BUNDLES = 6", "fee: {self.__estimate_harvest_gas(address, returns)}\" ) return self.__build_harvest_transaction(address, returns, options) elif function", "harvest_interval_threshold which is measured in seconds Args: strategy (contract): Vault", "send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash", "returns: return self.keeper_acl.functions.harvest(strategy_address).buildTransaction( options ) else: return self.keeper_acl.functions.harvestNoReturn( strategy_address ).buildTransaction(options)", 
"[ {\"signed_transaction\": signed_tx.rawTransaction}, ] block_number = self.web3.eth.block_number for i in", "price + buffer elif self.chain == Network.Ethereum: # EIP-1559 gas_price", "config.constants import MULTICHAIN_CONFIG from config.enums import Network from src.harvester import", "= self.web3.eth.contract( address=self.web3.toChecksumAddress( MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"] ), abi=get_abi(self.chain, \"rewards_manager\"), ) if not", "self.web3.eth.gas_price) # Estimated gas price + buffer elif self.chain ==", "strategy_address: str, returns: bool, options: dict ) -> dict: if", "is_time_to_harvest( self, strategy: contract.Contract, harvest_interval_threshold: int = MAX_TIME_BETWEEN_HARVESTS, ) ->", "{ \"nonce\": self.web3.eth.get_transaction_count( self.keeper_address, \"pending\" ), \"from\": self.keeper_address, \"gas\": GAS_LIMITS[self.chain],", "strategy_address ).buildTransaction(options) def __build_tend_transaction(self, strategy_address: str, options: dict) -> dict:", "transaction to ETH node for confirmation. 
Args: voter_proxy (contract) Raises:", "== \"rewards_manager\": key = self.keeper_acl.functions.KEEPER_ROLE().call() return self.keeper_acl.functions.hasRole(key, self.keeper_address).call() def __process_tend(", "= None, keeper_acl: str = os.getenv(\"KEEPER_ACL\"), keeper_address: str = os.getenv(\"KEEPER_ADDRESS\"),", "self.__is_keeper_whitelisted(\"rewards_manager\"): raise ValueError(f\"Keeper is not whitelisted for {strategy_name}\") want_address =", "And if pending self.update_last_harvest_time(strategy.address) send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url,", "confirm_transaction( self.web3, tx_hash, max_block=max_target_block ) if succeeded: # If successful,", "= \"harvest\" ) -> dict: \"\"\"Builds transaction depending on which", "as e: self.logger.error(f\"Error in sending harvestMta tx: {e}\") tx_hash =", "str, returns: bool = True, function: str = \"harvest\" )", "gas_price = get_effective_gas_price(self.web3) return gas_price def update_last_harvest_time(self, strategy_address: str): self.last_harvest_times[strategy_address]", "Amount of time in seconds that is acceptable to not", "of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url,", "want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost:", "Decimal(current_gas_price * estimated_gas) def __estimate_harvest_gas(self, strategy_address: str, returns: bool) ->", "then send transaction to Discord for monitoring Args: voter_proxy (contract):", "[Network.Arbitrum, Network.Fantom]: gas_price = int(1.1 * self.web3.eth.gas_price) # Estimated gas", "we log the error and return a tx_hash of 0x00.", "self.keeper_acl.functions.harvestNoReturn( strategy_address ).estimateGas({\"from\": 
self.keeper_address}) return Decimal(estimated_gas_to_harvest) def __estimate_tend_gas(self, strategy_address: str)", "(Decimal, optional): Amount of Sushi harvested. Defaults to None. \"\"\"", "vault AUM required to harvest NUM_FLASHBOTS_BUNDLES = 6 class GeneralHarvester(IHarvester):", "= self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") # for now we'll", "price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain,", "None. strategy_name (str, optional): Defaults to None. harvested (Decimal, optional):", "keeper_address=self.keeper_address ) finally: return tx_hash def __send_harvest_mta_tx(self, voter_proxy: contract) ->", "strategy: contract): strategy_name = strategy.functions.getName().call() # TODO: update for ACL", "ETH txs than the other EVM chains. Args: contract (contract):", "self, voter_proxy: contract, ): # TODO: update for ACL if", "from src.harvester import IHarvester from src.misc_utils import hours from src.misc_utils", "ValueError as e: self.logger.error(f\"Error in sending tend tx: {e}\") tx_hash", "for ACL if not self.__is_keeper_whitelisted(\"harvestMta\"): raise ValueError(\"Keeper ACL is not", "confirmation. Args: strategy (contract) Raises: Exception: If we have an", "self.logger.error(f\"Error in sending harvest tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e,", "contract, ): strategy_name = strategy.functions.getName().call() # TODO: update for ACL", "self.logger.info(f\"estimated want change: {want_to_harvest}\") # TODO: figure out how to", "Discord for monitoring Args: strategy (contract, optional): Defaults to None.", "strategy: contract.Contract, harvest_interval_threshold: int = MAX_TIME_BETWEEN_HARVESTS, ) -> bool: \"\"\"Calculates", "to communicate with node, etc.) 
we log the error and", "{current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, returns=False) self.logger.info(f\"estimated gas cost: {gas_fee}\") #", "harvested (Decimal, optional): Amount of Sushi harvested. Defaults to None.", "get_token_price from src.discord_utils import send_error_to_discord from src.discord_utils import send_success_to_discord logging.basicConfig(level=logging.INFO)", "def estimate_harvest_amount(self, strategy: contract) -> Decimal: want = self.web3.eth.contract( address=strategy.functions.want().call(),", "self.logger.error(f\"Error in sending harvestMta tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e,", "function=\"tend\") signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash", "src.tx_utils import get_gas_price_of_tx from src.tx_utils import get_priority_fee from src.web3_utils import", "web3 contract object harvest_interval_threshold (int, optional): Amount of time in", "def __build_harvest_transaction( self, strategy_address: str, returns: bool, options: dict )", "voter_proxy (contract) Raises: Exception: If we have an issue sending", "sleep(60) self.harvest(strategy) def estimate_harvest_amount(self, strategy: contract) -> Decimal: want =", "-> dict: if returns: return self.keeper_acl.functions.harvest(strategy_address).buildTransaction( options ) else: return", "i ) max_target_block = block_number + NUM_FLASHBOTS_BUNDLES self.logger.info(f\"Bundle broadcasted at", "os.getenv(\"KEEPER_ADDRESS\"), keeper_key: str = os.getenv(\"KEEPER_KEY\"), base_oracle_address: str = os.getenv(\"ETH_USD_CHAINLINK\"), use_flashbots:", "harvest: {should_harvest}\") if should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_no_return(", "raise ValueError(\"Keeper ACL is not whitelisted for calling tend\") #", "ValueError as e: self.logger.error(f\"Error in sending harvest tx: {e}\") tx_hash", "/ 3600}\" ) return current_time - 
last_harvest > harvest_interval_threshold except", "not self.__is_keeper_whitelisted(\"harvestMta\"): raise ValueError(\"Keeper ACL is not whitelisted for calling", "1): self.web3.flashbots.send_bundle( bundle, target_block_number=block_number + i ) max_target_block = block_number", "Decimal( self.keeper_acl.functions.tend(strategy_address).estimateGas( {\"from\": self.keeper_address} ) ) def __estimate_harvest_mta_gas(self, voter_proxy_address: str)", "= chain self.web3 = web3 self.keeper_key = keeper_key self.keeper_address =", "contract.Contract, harvest_interval_threshold: int = MAX_TIME_BETWEEN_HARVESTS, ) -> bool: \"\"\"Calculates the", "was sent. \"\"\" tx_hash = HexBytes(0) try: tx = self.__build_transaction(voter_proxy.address,", "estimated_gas_to_harvest = self.keeper_acl.functions.harvest( strategy_address ).estimateGas({\"from\": self.keeper_address}) else: estimated_gas_to_harvest = self.keeper_acl.functions.harvestNoReturn(", "tx: {e}\") send_error_to_discord( \"\", \"Harvest MTA\", error=e, chain=self.chain, keeper_address=self.keeper_address, )", "Implement this # harvest if ideal want change is >", "\"\"\" try: tx_hash, max_target_block = self.__send_harvest_tx( strategy, returns=returns ) succeeded,", "\"Harvest MTA\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash def __build_transaction(", "send transaction to Discord for monitoring Args: strategy (contract, optional):", "as e: self.logger.error(f\"Error processing tend tx: {e}\") send_error_to_discord( strategy_name, \"Tend\",", "import get_last_harvest_times from src.token_utils import get_token_price from src.discord_utils import send_error_to_discord", "\"Tend\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest( self, strategy: contract", "care about poly/arbitrum self.last_harvest_times = {} self.use_flashbots = use_flashbots self.discord_url", "= self.keeper_acl.functions.harvest( strategy_address 
).estimateGas({\"from\": self.keeper_address}) else: estimated_gas_to_harvest = self.keeper_acl.functions.harvestNoReturn( strategy_address", "contract, ): strategy_name = strategy.functions.getName().call() self.keeper_acl = self.web3.eth.contract( address=self.web3.toChecksumAddress( MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"]", "the strategy. Returns: bool: True if our bot is whitelisted", "return self.keeper_acl.functions.hasRole(key, self.keeper_address).call() def __process_tend( self, strategy: contract = None,", "(int, optional): Amount of time in seconds that is acceptable", "\"\"\"Sends transaction to ETH node for confirmation. Args: strategy (contract)", "and return a tx_hash of 0x00. Returns: HexBytes: Transaction hash", "# TODO: update for ACL if not self.__is_keeper_whitelisted(\"tend\"): raise ValueError(\"Keeper", "# harvest if ideal want change is > 0.05% of", "error and alert user. \"\"\" strategy_name = strategy.functions.getName().call() # TODO:", "self.discord_url = discord_url def is_time_to_harvest( self, strategy: contract.Contract, harvest_interval_threshold: int", "Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(base_oracle_address), abi=get_abi(self.chain, \"oracle\"), ) # Times of", "HexBytes: \"\"\"Sends transaction to ETH node for confirmation. Args: strategy", "with node, etc.) 
we log the error and return a", "def __get_effective_gas_price(self) -> int: if self.chain == Network.Polygon: response =", "just harvest every hour should_harvest = self.is_profitable() self.logger.info(f\"Should we harvest:", "return True def harvest( self, strategy: contract.Contract, ): \"\"\"Orchestration function", ") if not self.__is_keeper_whitelisted(\"rewards_manager\"): raise ValueError(f\"Keeper is not whitelisted for", "self, strategy_address: str, returns: bool, options: dict ) -> dict:", "= self.__send_harvest_mta_tx(voter_proxy) succeeded, _ = confirm_transaction(self.web3, tx_hash) if succeeded: #", "self.keeper_acl.functions.harvest( strategy_address ).estimateGas({\"from\": self.keeper_address}) else: estimated_gas_to_harvest = self.keeper_acl.functions.harvestNoReturn( strategy_address ).estimateGas({\"from\":", "update for ACL if not self.__is_keeper_whitelisted(\"harvestNoReturn\"): raise ValueError( \"Keeper ACL", "self.__send_tend_tx(strategy) succeeded, _ = confirm_transaction(self.web3, tx_hash) if succeeded: gas_price_of_tx =", ") return self.__build_harvest_transaction(address, returns, options) elif function == \"tend\": self.logger.info(f\"estimated", "ACL if not self.__is_keeper_whitelisted(\"tend\"): raise ValueError(\"Keeper ACL is not whitelisted", "harvest gas costs on eth if self.chain not in [Network.Ethereum,", "+ 1): self.web3.flashbots.send_bundle( bundle, target_block_number=block_number + i ) max_target_block =", "== \"harvestMta\": estimated_gas = self.__estimate_harvest_mta_gas(address) return Decimal(current_gas_price * estimated_gas) def", "+ buffer elif self.chain == Network.Ethereum: # EIP-1559 gas_price =", "rewards. Args: strategy (contract) Raises: ValueError: If the keeper isn't", "to not have harvested within. Defaults to MAX_TIME_BETWEEN_HARVESTS. 
Returns: bool:", "else: options[\"gasPrice\"] = self.__get_effective_gas_price() if function == \"harvest\": self.logger.info( f\"estimated", "not self.__is_keeper_whitelisted(\"harvest\"): raise ValueError(\"Keeper ACL is not whitelisted for calling", "Returns: dict: tx dictionary \"\"\" options = { \"nonce\": self.web3.eth.get_transaction_count(", "__get_effective_gas_price(self) -> int: if self.chain == Network.Polygon: response = requests.get(\"https://gasstation-mainnet.matic.network\").json()", "web3 import contract from web3.contract import Contract from config.constants import", "calls, False otherwise. \"\"\" if function in [\"harvest\", \"harvestMta\"]: key", "max_target_block = block_number + NUM_FLASHBOTS_BUNDLES self.logger.info(f\"Bundle broadcasted at {max_target_block}\") except", "self.logger = logging.getLogger(__name__) self.chain = chain self.web3 = web3 self.keeper_key", "Defaults to None. harvested (Decimal, optional): Amount of Sushi harvested.", "address: str, returns: bool = True, function: str = \"harvest\"", "Times of last harvest if self.chain in [Network.Ethereum, Network.Fantom]: self.last_harvest_times", "strategy: contract, ): strategy_name = strategy.functions.getName().call() # TODO: update for", "supplied strategy and returns true if it has been longer", "= None, ): self.logger = logging.getLogger(__name__) self.chain = chain self.web3", "src.discord_utils import get_hash_from_failed_tx_error from src.web3_utils import get_last_harvest_times from src.token_utils import", "from src.tx_utils import get_priority_fee from src.web3_utils import confirm_transaction from src.utils", "return self.keeper_acl.functions.harvest(strategy_address).buildTransaction( options ) else: return self.keeper_acl.functions.harvestNoReturn( strategy_address ).buildTransaction(options) def", "self.chain) self.logger.info(f\"price per want: {price_per_want} {currency}\") self.logger.info(f\"want gained: {want_gained}\") if", "for the strategy. 
Returns: bool: True if our bot is", "broadcasted at {max_target_block}\") except ValueError as e: self.logger.error(f\"Error in sending", "= Network.Ethereum, web3: Web3 = None, keeper_acl: str = os.getenv(\"KEEPER_ACL\"),", "self.keeper_address}) else: estimated_gas_to_harvest = self.keeper_acl.functions.harvestNoReturn( strategy_address ).estimateGas({\"from\": self.keeper_address}) return Decimal(estimated_gas_to_harvest)", "\"\"\" try: tx_hash = self.__send_harvest_mta_tx(voter_proxy) succeeded, _ = confirm_transaction(self.web3, tx_hash)", "TODO: update for ACL if not self.__is_keeper_whitelisted(\"tend\"): raise ValueError(\"Keeper ACL", "to use to build harvest tx Returns: dict: tx dictionary", "if not self.__is_keeper_whitelisted(\"harvestMta\"): raise ValueError(\"Keeper ACL is not whitelisted for", "except Exception as e: self.logger.error(f\"Error processing harvest tx: {e}\") send_error_to_discord(", "self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError as e: self.logger.error(f\"Error in sending harvestMta tx:", "== \"harvest\": self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_gas(address, returns)}\" ) return", "harvest_interval_threshold: int = MAX_TIME_BETWEEN_HARVESTS, ) -> bool: \"\"\"Calculates the time", "if our bot is whitelisted to make function calls, False", "import get_priority_fee from src.web3_utils import confirm_transaction from src.utils import get_abi", "not self.__is_keeper_whitelisted(\"harvestNoReturn\"): raise ValueError( \"Keeper ACL is not whitelisted for", "strategy.functions.getName().call() # TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvest\"): raise", "AUM required to harvest NUM_FLASHBOTS_BUNDLES = 6 class GeneralHarvester(IHarvester): def", "as e: self.logger.error(f\"Error in sending harvest tx: {e}\") tx_hash =", "self.logger.info(f\"vault balance: {vault_balance}\") # TODO: figure out how to handle", "try: tx_hash = self.__send_harvest_mta_tx(voter_proxy) 
succeeded, _ = confirm_transaction(self.web3, tx_hash) if", "self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_mta( self, voter_proxy: contract, ):", "self.keeper_acl, start_block=self.web3.eth.block_number - seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS), chain=self.chain, ) else: # Don't care", "__init__( self, chain: Network = Network.Ethereum, web3: Web3 = None,", "voter_proxy: contract) -> HexBytes: \"\"\"Sends transaction to ETH node for", "keeper_address=self.keeper_address ) finally: return tx_hash, max_target_block def __send_tend_tx(self, strategy: contract)", "bool, options: dict ) -> dict: if returns: return self.keeper_acl.functions.harvest(strategy_address).buildTransaction(", "self.is_profitable() self.logger.info(f\"Should we harvest: {should_harvest}\") if should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name,", ") else: return self.keeper_acl.functions.harvestNoReturn( strategy_address ).buildTransaction(options) def __build_tend_transaction(self, strategy_address: str,", "Returns: HexBytes: Transaction hash for transaction that was sent. \"\"\"", "max_target_block = None tx_hash = HexBytes(0) try: tx = self.__build_transaction(strategy.address,", "harvest time self.update_last_harvest_time(voter_proxy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain", "total vault assets # should_harvest = want_to_harvest / vault_balance >=", "HexBytes(0): send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) except Exception", "elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url,", "requests from hexbytes import HexBytes from web3 import Web3 from", "was sent. 
\"\"\" max_target_block = None tx_hash = HexBytes(0) try:", "src.utils import get_abi from src.discord_utils import get_hash_from_failed_tx_error from src.web3_utils import", "Exception as e: self.logger.error(f\"Error processing harvest tx: {e}\") send_error_to_discord( strategy_name,", "# TODO: figure out how to handle profit estimation #", "MAX_TIME_BETWEEN_HARVESTS, ) -> bool: \"\"\"Calculates the time between harvests for", "self.keeper_acl.functions.tend(strategy_address).estimateGas( {\"from\": self.keeper_address} ) ) def __estimate_harvest_mta_gas(self, voter_proxy_address: str) ->", "= self.web3.eth.contract( address=strategy.functions.want().call(), abi=get_abi(self.chain, \"erc20\"), ) want_gained = self.keeper_acl.functions.harvest(strategy.address).call( {\"from\":", "self.keeper_address).call() def __process_tend( self, strategy: contract = None, strategy_name: str", "self.base_usd_oracle, tx_hash, self.chain ) self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\")", "voter_proxy (contract): Mstable voter proxy contract \"\"\" try: tx_hash =", "= get_hash_from_failed_tx_error( e, \"Harvest\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash,", "[Network.Ethereum, Network.Fantom]: return True try: last_harvest = self.last_harvest_times[strategy.address] current_time =", "abi=get_abi(self.chain, \"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") gas_fee", "except Exception as e: self.logger.error(f\"Error processing harvestMta tx: {e}\") send_error_to_discord(", "keeper_address=self.keeper_address, ) except Exception as e: self.logger.error(f\"Error processing harvest tx:", "__build_transaction( self, address: str, returns: bool = True, function: str", "send_error_to_discord( \"\", \"Harvest MTA\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __send_harvest_tx(self,", "gas_fee = 
self.estimate_gas_fee(strategy.address, function=\"tend\") self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_tend( strategy=strategy,", "self.keeper_acl.functions.TENDER_ROLE().call() elif function == \"rewards_manager\": key = self.keeper_acl.functions.KEEPER_ROLE().call() return self.keeper_acl.functions.hasRole(key,", "ACL is not whitelisted for calling tend\") # TODO: figure", "IHarvester from src.misc_utils import hours from src.misc_utils import seconds_to_blocks from", "None tx_hash = HexBytes(0) try: tx = self.__build_transaction(strategy.address, returns=returns) signed_tx", "self.__estimate_harvest_gas(address, returns) elif function == \"tend\": estimated_gas = self.__estimate_tend_gas(address) elif", "import get_gas_price_of_tx from src.tx_utils import get_priority_fee from src.web3_utils import confirm_transaction", "= keeper_address self.keeper_acl: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(keeper_acl), abi=get_abi(self.chain, \"keeper_acl\"), )", "the supplied strategy and returns true if it has been", "self.logger.info(f\"Should we harvest: {should_harvest}\") if should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, )", "estimated_gas = self.__estimate_harvest_mta_gas(address) return Decimal(current_gas_price * estimated_gas) def __estimate_harvest_gas(self, strategy_address:", "Vault strategy web3 contract object harvest_interval_threshold (int, optional): Amount of", "self.keeper_acl.functions.harvest(strategy_address).buildTransaction( options ) else: return self.keeper_acl.functions.harvestNoReturn( strategy_address ).buildTransaction(options) def __build_tend_transaction(self,", "for ACL if not self.__is_keeper_whitelisted(\"harvestNoReturn\"): raise ValueError( \"Keeper ACL is", "elif function == \"tend\": estimated_gas = self.__estimate_tend_gas(address) elif function ==", "signed_tx.rawTransaction}, ] block_number = self.web3.eth.block_number for i in range(1, 
NUM_FLASHBOTS_BUNDLES", "from src.tx_utils import get_effective_gas_price from src.tx_utils import get_gas_price_of_tx from src.tx_utils", "} if self.chain == Network.Ethereum: options[\"maxPriorityFeePerGas\"] = get_priority_fee(self.web3) options[\"maxFeePerGas\"] =", "of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url,", "function == \"rewards_manager\": key = self.keeper_acl.functions.KEEPER_ROLE().call() return self.keeper_acl.functions.hasRole(key, self.keeper_address).call() def", "= strategy.functions.getName().call() # TODO: update for ACL if not self.__is_keeper_whitelisted(\"tend\"):", "message=msg, chain=self.chain, keeper_address=self.keeper_address, ) except Exception as e: self.logger.error(f\"Error processing", "\"\"\" if function in [\"harvest\", \"harvestMta\"]: key = self.keeper_acl.functions.HARVESTER_ROLE().call() elif", "tx = self.__build_transaction(strategy.address, returns=returns) signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key )", "GAS_LIMITS from config.constants import MULTICHAIN_CONFIG from config.enums import Network from", "os from decimal import Decimal from time import sleep import", "[\"harvest\", \"harvestMta\"]: key = self.keeper_acl.functions.HARVESTER_ROLE().call() elif function == \"tend\": key", "requests.get(\"https://gasstation-mainnet.matic.network\").json() gas_price = self.web3.toWei(int(response.get(\"fast\") * 1.1), \"gwei\") elif self.chain in", "try: tx = self.__build_transaction(strategy.address, returns=returns) signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key", "gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain ) self.logger.info(f\"got gas", "= get_hash_from_failed_tx_error( e, \"Tend\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash", "tx, 
private_key=self.keeper_key ) tx_hash = signed_tx.hash self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError as", "-> Decimal: if returns: estimated_gas_to_harvest = self.keeper_acl.functions.harvest( strategy_address ).estimateGas({\"from\": self.keeper_address})", "get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain ) self.logger.info(f\"got gas price of", "it has been longer than the supplied harvest_interval_threshold which is", ") -> Decimal: current_gas_price = self.__get_effective_gas_price() if function == \"harvest\":", "keeper isn't whitelisted, throw an error and alert user. \"\"\"", "supplied harvest_interval_threshold which is measured in seconds Args: strategy (contract):", "False, discord_url: str = None, ): self.logger = logging.getLogger(__name__) self.chain", "def __send_tend_tx(self, strategy: contract) -> HexBytes: \"\"\"Sends transaction to ETH", "import GAS_LIMITS from config.constants import MULTICHAIN_CONFIG from config.enums import Network", "Decimal: want = self.web3.eth.contract( address=strategy.functions.want().call(), abi=get_abi(self.chain, \"erc20\"), ) want_gained =", "contract from web3.contract import Contract from config.constants import BASE_CURRENCIES from", "{e}\") send_error_to_discord( strategy_name, \"Harvest\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest_mta(", "whitelisted, throw an error and alert user. \"\"\" strategy_name =", "self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_gas(address, returns)}\" ) return self.__build_harvest_transaction(address, returns,", "want change is > 0.05% of total vault assets #", "# TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvest\"): raise ValueError(\"Keeper", "(str, optional): Defaults to None. 
harvested (Decimal, optional): Amount of", "\"harvest\": self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_gas(address, returns)}\" ) return self.__build_harvest_transaction(address,", "if succeeded: # If successful, update last harvest harvest #", "to None. \"\"\" try: tx_hash, max_target_block = self.__send_harvest_tx( strategy, returns=returns", "tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash !=", "want_to_harvest / vault_balance >= HARVEST_THRESHOLD return True def __is_keeper_whitelisted(self, function:", "elif function == \"tend\": key = self.keeper_acl.functions.TENDER_ROLE().call() elif function ==", "{max_target_block}\") except ValueError as e: self.logger.error(f\"Error in sending harvest tx:", "signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash self.web3.eth.send_raw_transaction(signed_tx.rawTransaction)", "\"gas\": GAS_LIMITS[self.chain], } if self.chain == Network.Ethereum: options[\"maxPriorityFeePerGas\"] = get_priority_fee(self.web3)", "def tend_then_harvest(self, strategy: contract): self.tend(strategy) sleep(60) self.harvest(strategy) def estimate_harvest_amount(self, strategy:", "-> Decimal: current_gas_price = self.__get_effective_gas_price() if function == \"harvest\": estimated_gas", "{vault_balance}\") want_to_harvest = ( self.estimate_harvest_amount(strategy) / 10 ** want.functions.decimals().call() )", "\"erc20\"), ) vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") want_to_harvest =", "try: tx_hash, max_target_block = self.__send_harvest_tx( strategy, returns=returns ) succeeded, msg", "in range(1, NUM_FLASHBOTS_BUNDLES + 1): self.web3.flashbots.send_bundle( bundle, target_block_number=block_number + i", "monitoring Args: strategy (contract, optional): Defaults to None. 
strategy_name (str,", "contract, ): \"\"\"Private function to create, broadcast, confirm tx on", "send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) else: send_error_to_discord( strategy_name,", "import IHarvester from src.misc_utils import hours from src.misc_utils import seconds_to_blocks", "calling harvestMta\") gas_fee = self.estimate_gas_fee(voter_proxy.address, function=\"harvestMta\") self.logger.info(f\"estimated gas cost: {gas_fee}\")", "\"rewards_manager\": key = self.keeper_acl.functions.KEEPER_ROLE().call() return self.keeper_acl.functions.hasRole(key, self.keeper_address).call() def __process_tend( self,", "self, voter_proxy: contract, ): \"\"\"Private function to create, broadcast, confirm", "address=strategy.functions.want().call(), abi=get_abi(self.chain, \"erc20\"), ) want_gained = self.keeper_acl.functions.harvest(strategy.address).call( {\"from\": self.keeper_address} )", "keeper_key: str = os.getenv(\"KEEPER_KEY\"), base_oracle_address: str = os.getenv(\"ETH_USD_CHAINLINK\"), use_flashbots: bool", "use_flashbots: bool = False, discord_url: str = None, ): self.logger", "\"\"\" tx_hash = HexBytes(0) try: tx = self.__build_transaction(strategy.address, function=\"tend\") signed_tx", "want.functions.decimals().call() ) self.logger.info(f\"estimated want change: {want_to_harvest}\") # TODO: figure out", "vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") # TODO: figure out", "tx: {e}\") send_error_to_discord( strategy_name, \"Tend\", error=e, chain=self.chain, keeper_address=self.keeper_address, ) def", "# call badger api to get prices currency = BASE_CURRENCIES[self.chain]", "( self.estimate_harvest_amount(strategy) / 10 ** want.functions.decimals().call() ) self.logger.info(f\"estimated want change:", "src.discord_utils import send_error_to_discord from src.discord_utils import send_success_to_discord 
logging.basicConfig(level=logging.INFO) MAX_TIME_BETWEEN_HARVESTS =", "url=self.discord_url, ) except Exception as e: self.logger.error(f\"Error processing tend tx:", "abi=get_abi(self.chain, \"rewards_manager\"), ) if not self.__is_keeper_whitelisted(\"rewards_manager\"): raise ValueError(f\"Keeper is not", "\"from\": self.keeper_address, \"gas\": GAS_LIMITS[self.chain], } if self.chain == Network.Ethereum: options[\"maxPriorityFeePerGas\"]", "strategy_name = strategy.functions.getName().call() # TODO: update for ACL if not", "self.keeper_acl = self.web3.eth.contract( address=self.web3.toChecksumAddress( MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"] ), abi=get_abi(self.chain, \"rewards_manager\"), ) if", "== Network.Ethereum: options[\"maxPriorityFeePerGas\"] = get_priority_fee(self.web3) options[\"maxFeePerGas\"] = self.__get_effective_gas_price() else: options[\"gasPrice\"]", "If successful, update last harvest harvest # time to make", "None, returns: bool = True, ): \"\"\"Private function to create,", "\"harvest\" ) -> dict: \"\"\"Builds transaction depending on which chain", "src.discord_utils import send_success_to_discord logging.basicConfig(level=logging.INFO) MAX_TIME_BETWEEN_HARVESTS = hours(120) HARVEST_THRESHOLD = 0.0005", "is measured in seconds Args: strategy (contract): Vault strategy web3", "bot we're using is whitelisted for the strategy. Returns: bool:", "def tend(self, strategy: contract): strategy_name = strategy.functions.getName().call() # TODO: update", "token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, function=\"tend\") self.logger.info(f\"estimated gas cost:", "alert user. 
\"\"\" strategy_name = strategy.functions.getName().call() # TODO: update for", "self.__is_keeper_whitelisted(\"tend\"): raise ValueError(\"Keeper ACL is not whitelisted for calling tend\")", "tx_hash def __send_harvest_mta_tx(self, voter_proxy: contract) -> HexBytes: \"\"\"Sends transaction to", "get_effective_gas_price from src.tx_utils import get_gas_price_of_tx from src.tx_utils import get_priority_fee from", "an error and alert user. \"\"\" strategy_name = strategy.functions.getName().call() #", "\"keeper_acl\"), ) self.base_usd_oracle: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(base_oracle_address), abi=get_abi(self.chain, \"oracle\"), )", "os.getenv(\"ETH_USD_CHAINLINK\"), use_flashbots: bool = False, discord_url: str = None, ):", "return Decimal(estimated_gas_to_harvest) def __estimate_tend_gas(self, strategy_address: str) -> Decimal: return Decimal(", "strategy (contract, optional): Defaults to None. strategy_name (str, optional): Defaults", "chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash def __send_harvest_mta_tx(self, voter_proxy: contract)", "= confirm_transaction(self.web3, tx_hash) if succeeded: gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle,", "e: self.logger.error(f\"Error in sending tend tx: {e}\") tx_hash = get_hash_from_failed_tx_error(", "{\"from\": self.keeper_address} ) ) def __get_effective_gas_price(self) -> int: if self.chain", "finally: return tx_hash def __send_harvest_mta_tx(self, voter_proxy: contract) -> HexBytes: \"\"\"Sends", "f\"Time since last harvest: {(current_time - last_harvest) / 3600}\" )", "TODO: figure out how to handle profit estimation # current_price_eth", "str, options: dict ) -> dict: return self.keeper_acl.functions.harvestMta( voter_proxy_address ).buildTransaction(options)", "__process_harvest( self, strategy: contract = None, strategy_name: str = None,", "self.logger.info( f\"Time since last harvest: {(current_time - last_harvest) / 
3600}\"", "Returns: bool: True if time since last harvest is >", "import requests from hexbytes import HexBytes from web3 import Web3", "current_price_eth = self.get_current_rewards_price() # self.logger.info(f\"current rewards price per token (ETH):", ") elif tx_hash != HexBytes(0): if not self.use_flashbots: # And", "transaction that was sent. \"\"\" tx_hash = HexBytes(0) try: tx", "def __process_harvest_mta( self, voter_proxy: contract, ): \"\"\"Private function to create,", "= os.getenv(\"KEEPER_ADDRESS\"), keeper_key: str = os.getenv(\"KEEPER_KEY\"), base_oracle_address: str = os.getenv(\"ETH_USD_CHAINLINK\"),", "KeyError: return True def harvest( self, strategy: contract.Contract, ): \"\"\"Orchestration", "from config.constants import MULTICHAIN_CONFIG from config.enums import Network from src.harvester", "for transaction that was sent. \"\"\" max_target_block = None tx_hash", "self.keeper_acl.functions.harvestMta( voter_proxy_address ).buildTransaction(options) def estimate_gas_fee( self, address: str, returns: bool", "strategy (contract) Raises: ValueError: If the keeper isn't whitelisted, throw", "e, \"Tend\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash def __send_harvest_mta_tx(self,", "get_last_harvest_times( self.web3, self.keeper_acl, start_block=self.web3.eth.block_number - seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS), chain=self.chain, ) else: #", "bool: \"\"\"Checks if the bot we're using is whitelisted for", "self.estimate_harvest_amount(strategy) / 10 ** want.functions.decimals().call() ) self.logger.info(f\"estimated want change: {want_to_harvest}\")", "gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx,", "try: tx_hash = self.__send_tend_tx(strategy) succeeded, _ = confirm_transaction(self.web3, tx_hash) if", "= self.keeper_acl.functions.KEEPER_ROLE().call() return self.keeper_acl.functions.hasRole(key, 
self.keeper_address).call() def __process_tend( self, strategy: contract", "harvest if ideal want change is > 0.05% of total", "0x00. Returns: HexBytes: Transaction hash for transaction that was sent.", "(unable to communicate with node, etc.) we log the error", "HexBytes(0) try: tx = self.__build_transaction(voter_proxy.address, function=\"harvestMta\") signed_tx = self.web3.eth.account.sign_transaction( tx,", "at {max_target_block}\") except ValueError as e: self.logger.error(f\"Error in sending harvest", "currency = BASE_CURRENCIES[self.chain] if self.chain == Network.Fantom: price_per_want = get_token_price(", "tx_hash = HexBytes(0) try: tx = self.__build_transaction(strategy.address, returns=returns) signed_tx =", "self.web3, self.keeper_acl, start_block=self.web3.eth.block_number - seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS), chain=self.chain, ) else: # Don't", "self.use_flashbots: self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) else: bundle = [ {\"signed_transaction\": signed_tx.rawTransaction}, ] block_number", "self.__estimate_tend_gas(address) elif function == \"harvestMta\": estimated_gas = self.__estimate_harvest_mta_gas(address) return Decimal(current_gas_price", "confirmation. Args: voter_proxy (contract) Raises: Exception: If we have an", "if self.chain == Network.Fantom: price_per_want = get_token_price( want.address, currency, self.chain,", "tx_hash) if succeeded: gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle, tx_hash, self.chain", "contract.Contract, ): \"\"\"Orchestration function that harvests outstanding rewards. 
Args: strategy", "self.chain == Network.Ethereum: options[\"maxPriorityFeePerGas\"] = get_priority_fee(self.web3) options[\"maxFeePerGas\"] = self.__get_effective_gas_price() else:", "= self.__estimate_harvest_mta_gas(address) return Decimal(current_gas_price * estimated_gas) def __estimate_harvest_gas(self, strategy_address: str,", "self.get_current_rewards_price() # self.logger.info(f\"current rewards price per token (ETH): {current_price_eth}\") gas_fee", "send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash", "that harvests outstanding rewards. Args: strategy (contract) Raises: ValueError: If", "== Network.Polygon: response = requests.get(\"https://gasstation-mainnet.matic.network\").json() gas_price = self.web3.toWei(int(response.get(\"fast\") * 1.1),", "dict: tx dictionary \"\"\" options = { \"nonce\": self.web3.eth.get_transaction_count( self.keeper_address,", "> harvest_interval_threshold except KeyError: return True def harvest( self, strategy:", "time to make sure we don't double harvest self.update_last_harvest_time(strategy.address) gas_price_of_tx", "strategy_address ).estimateGas({\"from\": self.keeper_address}) return Decimal(estimated_gas_to_harvest) def __estimate_tend_gas(self, strategy_address: str) ->", "= get_last_harvest_times( self.web3, self.keeper_acl, start_block=self.web3.eth.block_number - seconds_to_blocks(MAX_TIME_BETWEEN_HARVESTS), chain=self.chain, ) else:", "Returns: bool: True if our bot is whitelisted to make", "processing harvestMta tx: {e}\") send_error_to_discord( \"\", \"Harvest MTA\", error=e, chain=self.chain,", "\"Keeper ACL is not whitelisted for calling harvestNoReturn\" ) want_address", "gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash, gas_cost=gas_price_of_tx,", "= self.keeper_acl.functions.harvestNoReturn( strategy_address ).estimateGas({\"from\": 
self.keeper_address}) return Decimal(estimated_gas_to_harvest) def __estimate_tend_gas(self, strategy_address:", "url=self.discord_url, ) elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=\"Harvest MTA\", tx_hash=tx_hash,", "return self.__build_harvest_mta_transaction(address, options) def __build_harvest_transaction( self, strategy_address: str, returns: bool,", "GAS_LIMITS[self.chain], } if self.chain == Network.Ethereum: options[\"maxPriorityFeePerGas\"] = get_priority_fee(self.web3) options[\"maxFeePerGas\"]", "elif function == \"rewards_manager\": key = self.keeper_acl.functions.KEEPER_ROLE().call() return self.keeper_acl.functions.hasRole(key, self.keeper_address).call()", "error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest_mta( self, voter_proxy: contract, ):", "returns=returns) signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash", "= self.__build_transaction(strategy.address, returns=returns) signed_tx = self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash", "discord_url: str = None, ): self.logger = logging.getLogger(__name__) self.chain =", "voter_proxy: contract, ): # TODO: update for ACL if not", "None, harvested: Decimal = None, returns: bool = True, ):", "want = self.web3.eth.contract( address=strategy.functions.want().call(), abi=get_abi(self.chain, \"erc20\"), ) want_gained = self.keeper_acl.functions.harvest(strategy.address).call(", "None. 
\"\"\" try: tx_hash, max_target_block = self.__send_harvest_tx( strategy, returns=returns )", "call badger api to get prices currency = BASE_CURRENCIES[self.chain] if", "currency, self.chain, use_staging=True ) else: price_per_want = get_token_price(want.address, currency, self.chain)", "self.keeper_address, \"pending\" ), \"from\": self.keeper_address, \"gas\": GAS_LIMITS[self.chain], } if self.chain", "response = requests.get(\"https://gasstation-mainnet.matic.network\").json() gas_price = self.web3.toWei(int(response.get(\"fast\") * 1.1), \"gwei\") elif", "): self.logger = logging.getLogger(__name__) self.chain = chain self.web3 = web3", "keeper_address self.keeper_acl: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(keeper_acl), abi=get_abi(self.chain, \"keeper_acl\"), ) self.base_usd_oracle:", "in seconds that is acceptable to not have harvested within.", "(contract): contract to use to build harvest tx Returns: dict:", "TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvestNoReturn\"): raise ValueError( \"Keeper", ") def harvest_no_return( self, strategy: contract, ): strategy_name = strategy.functions.getName().call()", "self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) else: bundle = [ {\"signed_transaction\": signed_tx.rawTransaction}, ] block_number =", "strategy. 
Returns: bool: True if our bot is whitelisted to", "= self.estimate_gas_fee(strategy.address, returns=False) self.logger.info(f\"estimated gas cost: {gas_fee}\") # for now", "raise ValueError( \"Keeper ACL is not whitelisted for calling harvestNoReturn\"", "import Network from src.harvester import IHarvester from src.misc_utils import hours", "strategy: contract): self.tend(strategy) sleep(60) self.harvest(strategy) def estimate_harvest_amount(self, strategy: contract) ->", "{want_gained}\") if type(want_gained) is list: want_gained = 0 return price_per_want", "** want.functions.decimals().call() ) self.logger.info(f\"estimated want change: {want_to_harvest}\") # TODO: figure", "that is acceptable to not have harvested within. Defaults to", "import MULTICHAIN_CONFIG from config.enums import Network from src.harvester import IHarvester", "strategy web3 contract object harvest_interval_threshold (int, optional): Amount of time", "Args: strategy (contract, optional): Defaults to None. strategy_name (str, optional):", "been longer than the supplied harvest_interval_threshold which is measured in", "self.keeper_address} ) ) def __get_effective_gas_price(self) -> int: if self.chain ==", "HexBytes: Transaction hash for transaction that was sent. 
\"\"\" max_target_block", "confirm tx on eth and then send transaction to Discord", "self.web3.eth.account.sign_transaction( tx, private_key=self.keeper_key ) tx_hash = signed_tx.hash if not self.use_flashbots:", "discord_url def is_time_to_harvest( self, strategy: contract.Contract, harvest_interval_threshold: int = MAX_TIME_BETWEEN_HARVESTS,", "__is_keeper_whitelisted(self, function: str) -> bool: \"\"\"Checks if the bot we're", "want_gained def is_profitable(self) -> bool: # TODO: Implement this #", "= want_to_harvest / vault_balance >= HARVEST_THRESHOLD return True def __is_keeper_whitelisted(self,", "{strategy_name}\", tx_hash=tx_hash, chain=self.chain, url=self.discord_url, ) else: send_error_to_discord( strategy_name, \"Harvest\", tx_hash=tx_hash,", "\"\"\"Checks if the bot we're using is whitelisted for the", "should_harvest = self.is_profitable() self.logger.info(f\"Should we harvest: {should_harvest}\") if should_harvest: self.__process_harvest(", "self.web3.eth.contract( address=self.web3.toChecksumAddress(base_oracle_address), abi=get_abi(self.chain, \"oracle\"), ) # Times of last harvest", "): strategy_name = strategy.functions.getName().call() self.keeper_acl = self.web3.eth.contract( address=self.web3.toChecksumAddress( MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"] ),", "{gas_fee}\") should_harvest_mta = self.is_profitable() self.logger.info(f\"Should we call harvestMta: {should_harvest_mta}\") if", "bool = True, ): \"\"\"Private function to create, broadcast, confirm", "was sent. 
\"\"\" tx_hash = HexBytes(0) try: tx = self.__build_transaction(strategy.address,", "gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0): send_success_to_discord( tx_type=f\"Tend", "self.web3.toWei(int(response.get(\"fast\") * 1.1), \"gwei\") elif self.chain in [Network.Arbitrum, Network.Fantom]: gas_price", "whitelisted for calling harvestMta\") gas_fee = self.estimate_gas_fee(voter_proxy.address, function=\"harvestMta\") self.logger.info(f\"estimated gas", "= None, strategy_name: str = None, ): try: tx_hash =", "tx dictionary \"\"\" options = { \"nonce\": self.web3.eth.get_transaction_count( self.keeper_address, \"pending\"", "we harvest: {should_harvest}\") if should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def", "get_priority_fee(self.web3) options[\"maxFeePerGas\"] = self.__get_effective_gas_price() else: options[\"gasPrice\"] = self.__get_effective_gas_price() if function", ") -> bool: \"\"\"Calculates the time between harvests for the", "contract): self.tend(strategy) sleep(60) self.harvest(strategy) def estimate_harvest_amount(self, strategy: contract) -> Decimal:", "{current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address, function=\"tend\") self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_tend(", "sending tend tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Tend\", chain=self.chain,", "returns: bool = True) -> HexBytes: \"\"\"Sends transaction to ETH", "want to total vault AUM required to harvest NUM_FLASHBOTS_BUNDLES =", "from src.misc_utils import hours from src.misc_utils import seconds_to_blocks from src.tx_utils", "want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") want_to_harvest = ( self.estimate_harvest_amount(strategy) / 10", "broadcast, confirm tx on eth and then send transaction to", "gas cost: {gas_fee}\") self.__process_tend( strategy=strategy, 
strategy_name=strategy_name, ) def tend_then_harvest(self, strategy:", "currency, self.chain) self.logger.info(f\"price per want: {price_per_want} {currency}\") self.logger.info(f\"want gained: {want_gained}\")", "self.estimate_gas_fee(strategy.address, function=\"tend\") self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_tend( strategy=strategy, strategy_name=strategy_name, )", "\"rewards_manager\"), ) if not self.__is_keeper_whitelisted(\"rewards_manager\"): raise ValueError(f\"Keeper is not whitelisted", "self.keeper_acl.functions.KEEPER_ROLE().call() return self.keeper_acl.functions.hasRole(key, self.keeper_address).call() def __process_tend( self, strategy: contract =", "base_oracle_address: str = os.getenv(\"ETH_USD_CHAINLINK\"), use_flashbots: bool = False, discord_url: str", "\"\"\" tx_hash = HexBytes(0) try: tx = self.__build_transaction(voter_proxy.address, function=\"harvestMta\") signed_tx", "{gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Tend {strategy_name}\", tx_hash=tx_hash, gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif", "build harvest tx Returns: dict: tx dictionary \"\"\" options =", "0.0005 # min ratio of want to total vault AUM", "function to create, broadcast, confirm tx on eth and then", "else False \"\"\" # Only care about harvest gas costs", "self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_no_return( self, strategy: contract, ):", "harvest: {should_harvest}\") if should_harvest: self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_rewards_manager(", "self.harvest(strategy) def estimate_harvest_amount(self, strategy: contract) -> Decimal: want = self.web3.eth.contract(", "we don't double harvest self.update_last_harvest_time(strategy.address) gas_price_of_tx = get_gas_price_of_tx( self.web3, self.base_usd_oracle,", "balance: {vault_balance}\") # TODO: figure out how to handle profit", "self.chain in [Network.Ethereum, 
Network.Fantom]: self.last_harvest_times = get_last_harvest_times( self.web3, self.keeper_acl, start_block=self.web3.eth.block_number", "is whitelisted to make function calls, False otherwise. \"\"\" if", "tend tx: {e}\") send_error_to_discord( strategy_name, \"Tend\", error=e, chain=self.chain, keeper_address=self.keeper_address, )", "return Decimal( self.keeper_acl.functions.harvestMta(voter_proxy_address).estimateGas( {\"from\": self.keeper_address} ) ) def __get_effective_gas_price(self) ->", "harvestMta tx: {e}\") tx_hash = get_hash_from_failed_tx_error( e, \"Harvest MTA\", chain=self.chain,", "__build_harvest_mta_transaction( self, voter_proxy_address: str, options: dict ) -> dict: return", "src.web3_utils import get_last_harvest_times from src.token_utils import get_token_price from src.discord_utils import", "None. harvested (Decimal, optional): Amount of Sushi harvested. Defaults to", "self.logger.info(f\"estimated gas cost: {gas_fee}\") # for now we'll just harvest", "{want_to_harvest}\") # TODO: figure out how to handle profit estimation", "function == \"tend\": self.logger.info(f\"estimated gas fee: {self.__estimate_tend_gas(address)}\") return self.__build_tend_transaction(address, options)", "we call harvestMta: {should_harvest_mta}\") if should_harvest_mta: self.__process_harvest_mta(voter_proxy) def tend(self, strategy:", "return tx_hash, max_target_block def __send_tend_tx(self, strategy: contract) -> HexBytes: \"\"\"Sends", "not whitelisted for calling tend\") # TODO: figure out how", "import seconds_to_blocks from src.tx_utils import get_effective_gas_price from src.tx_utils import get_gas_price_of_tx", "_ = confirm_transaction(self.web3, tx_hash) if succeeded: gas_price_of_tx = get_gas_price_of_tx( self.web3,", "tx_hash = signed_tx.hash self.web3.eth.send_raw_transaction(signed_tx.rawTransaction) except ValueError as e: self.logger.error(f\"Error in", "logging import os from decimal import Decimal from time import", "def harvest_no_return( self, 
strategy: contract, ): strategy_name = strategy.functions.getName().call() #", "is_profitable(self) -> bool: # TODO: Implement this # harvest if", "def __build_transaction( self, address: str, returns: bool = True, function:", "* 1.1), \"gwei\") elif self.chain in [Network.Arbitrum, Network.Fantom]: gas_price =", ") finally: return tx_hash def __build_transaction( self, address: str, returns:", "harvest if self.chain in [Network.Ethereum, Network.Fantom]: self.last_harvest_times = get_last_harvest_times( self.web3,", "\"Tend\", chain=self.chain, keeper_address=self.keeper_address ) finally: return tx_hash def __send_harvest_mta_tx(self, voter_proxy:", "strategy=strategy, strategy_name=strategy_name, ) def harvest_rewards_manager( self, strategy: contract, ): strategy_name", "__estimate_tend_gas(self, strategy_address: str) -> Decimal: return Decimal( self.keeper_acl.functions.tend(strategy_address).estimateGas( {\"from\": self.keeper_address}", "import get_hash_from_failed_tx_error from src.web3_utils import get_last_harvest_times from src.token_utils import get_token_price", "config.constants import GAS_LIMITS from config.constants import MULTICHAIN_CONFIG from config.enums import", "self.logger.info(f\"Should we call harvestMta: {should_harvest_mta}\") if should_harvest_mta: self.__process_harvest_mta(voter_proxy) def tend(self,", "sent. 
\"\"\" max_target_block = None tx_hash = HexBytes(0) try: tx", "__send_tend_tx(self, strategy: contract) -> HexBytes: \"\"\"Sends transaction to ETH node", "eth and then send transaction to Discord for monitoring Args:", "for monitoring Args: voter_proxy (contract): Mstable voter proxy contract \"\"\"", "error=e, chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest( self, strategy: contract =", ").buildTransaction(options) def estimate_gas_fee( self, address: str, returns: bool = True,", "gas_cost=gas_price_of_tx, chain=self.chain, url=self.discord_url, ) elif tx_hash != HexBytes(0): if not", "try: last_harvest = self.last_harvest_times[strategy.address] current_time = self.web3.eth.get_block(\"latest\")[\"timestamp\"] self.logger.info( f\"Time since", "self.web3.eth.contract( address=self.web3.toChecksumAddress(keeper_acl), abi=get_abi(self.chain, \"keeper_acl\"), ) self.base_usd_oracle: Contract = self.web3.eth.contract( address=self.web3.toChecksumAddress(base_oracle_address),", "want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") # TODO: figure out how to", "transaction to Discord for monitoring Args: strategy (contract, optional): Defaults", "chain=self.chain, keeper_address=self.keeper_address, ) def __process_harvest_mta( self, voter_proxy: contract, ): \"\"\"Private", "self.__send_harvest_tx( strategy, returns=returns ) succeeded, msg = confirm_transaction( self.web3, tx_hash,", "): strategy_name = strategy.functions.getName().call() # TODO: update for ACL if", "self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_tend( strategy=strategy, strategy_name=strategy_name, ) def tend_then_harvest(self,", "to create, broadcast, confirm tx on eth and then send", "self.logger.info(f\"got gas price of tx: {gas_price_of_tx}\") send_success_to_discord( tx_type=f\"Harvest {strategy_name}\", tx_hash=tx_hash,", "gas_fee = self.estimate_gas_fee(voter_proxy.address, 
function=\"harvestMta\") self.logger.info(f\"estimated gas cost: {gas_fee}\") should_harvest_mta =", "{gas_fee}\") self.__process_harvest( strategy=strategy, strategy_name=strategy_name, ) def harvest_mta( self, voter_proxy: contract,", "and returns true if it has been longer than the", "succeeded, _ = confirm_transaction(self.web3, tx_hash) if succeeded: # If successful,", "requires different handling for ETH txs than the other EVM", "def harvest_rewards_manager( self, strategy: contract, ): strategy_name = strategy.functions.getName().call() self.keeper_acl", ") vault_balance = want.functions.balanceOf(strategy.address).call() self.logger.info(f\"vault balance: {vault_balance}\") gas_fee = self.estimate_gas_fee(strategy.address)", "self.logger.info(f\"current rewards price per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address,", ") want_address = strategy.functions.want().call() want = self.web3.eth.contract( address=want_address, abi=get_abi(self.chain, \"erc20\"),", "== Network.Fantom: price_per_want = get_token_price( want.address, currency, self.chain, use_staging=True )", "TODO: Implement this # harvest if ideal want change is", "self.estimate_gas_fee(voter_proxy.address, function=\"harvestMta\") self.logger.info(f\"estimated gas cost: {gas_fee}\") should_harvest_mta = self.is_profitable() self.logger.info(f\"Should", "str = None, ): self.logger = logging.getLogger(__name__) self.chain = chain", "we're using is whitelisted for the strategy. Returns: bool: True", "sent. 
\"\"\" tx_hash = HexBytes(0) try: tx = self.__build_transaction(voter_proxy.address, function=\"harvestMta\")", ") def __process_harvest( self, strategy: contract = None, strategy_name: str", "required to harvest NUM_FLASHBOTS_BUNDLES = 6 class GeneralHarvester(IHarvester): def __init__(", "is not whitelisted for calling harvestNoReturn\" ) want_address = strategy.functions.want().call()", "address=self.web3.toChecksumAddress( MULTICHAIN_CONFIG[self.chain][\"rewards_manager\"] ), abi=get_abi(self.chain, \"rewards_manager\"), ) if not self.__is_keeper_whitelisted(\"rewards_manager\"): raise", "last_harvest > harvest_interval_threshold except KeyError: return True def harvest( self,", "function == \"harvest\": self.logger.info( f\"estimated gas fee: {self.__estimate_harvest_gas(address, returns)}\" )", "\"tend\": self.logger.info(f\"estimated gas fee: {self.__estimate_tend_gas(address)}\") return self.__build_tend_transaction(address, options) elif function", "import get_effective_gas_price from src.tx_utils import get_gas_price_of_tx from src.tx_utils import get_priority_fee", "succeeded, msg = confirm_transaction( self.web3, tx_hash, max_block=max_target_block ) if succeeded:", "harvest_interval_threshold, else False \"\"\" # Only care about harvest gas", "str, options: dict) -> dict: return self.keeper_acl.functions.tend(strategy_address).buildTransaction( options ) def", "seconds that is acceptable to not have harvested within. Defaults", "the other EVM chains. 
Args: contract (contract): contract to use", "strategy (contract): Vault strategy web3 contract object harvest_interval_threshold (int, optional):", "per token (ETH): {current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost:", "str = None, ): try: tx_hash = self.__send_tend_tx(strategy) succeeded, _", "to Discord for monitoring Args: voter_proxy (contract): Mstable voter proxy", "= self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") self.__process_harvest( strategy=strategy, strategy_name=strategy_name, )", "return True try: last_harvest = self.last_harvest_times[strategy.address] current_time = self.web3.eth.get_block(\"latest\")[\"timestamp\"] self.logger.info(", "logging.basicConfig(level=logging.INFO) MAX_TIME_BETWEEN_HARVESTS = hours(120) HARVEST_THRESHOLD = 0.0005 # min ratio", "src.misc_utils import hours from src.misc_utils import seconds_to_blocks from src.tx_utils import", "self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") # for now we'll just", "{current_price_eth}\") gas_fee = self.estimate_gas_fee(strategy.address) self.logger.info(f\"estimated gas cost: {gas_fee}\") # for", "-> dict: return self.keeper_acl.functions.tend(strategy_address).buildTransaction( options ) def __build_harvest_mta_transaction( self, voter_proxy_address:", ") finally: return tx_hash def __send_harvest_mta_tx(self, voter_proxy: contract) -> HexBytes:", "dict: return self.keeper_acl.functions.harvestMta( voter_proxy_address ).buildTransaction(options) def estimate_gas_fee( self, address: str,", "\"\"\"Sends transaction to ETH node for confirmation. 
Args: voter_proxy (contract)", "If we have an issue sending transaction (unable to communicate", "EIP-1559 requires different handling for ETH txs than the other", "dict: return self.keeper_acl.functions.tend(strategy_address).buildTransaction( options ) def __build_harvest_mta_transaction( self, voter_proxy_address: str,", "3600}\" ) return current_time - last_harvest > harvest_interval_threshold except KeyError:", "# TODO: update for ACL if not self.__is_keeper_whitelisted(\"harvestNoReturn\"): raise ValueError(", "processing harvest tx: {e}\") send_error_to_discord( strategy_name, \"Harvest\", error=e, chain=self.chain, keeper_address=self.keeper_address,", "= self.web3.eth.get_block(\"latest\")[\"timestamp\"] self.logger.info( f\"Time since last harvest: {(current_time - last_harvest)", ") def tend_then_harvest(self, strategy: contract): self.tend(strategy) sleep(60) self.harvest(strategy) def estimate_harvest_amount(self,", "about harvest gas costs on eth if self.chain not in", "self.last_harvest_times[strategy.address] current_time = self.web3.eth.get_block(\"latest\")[\"timestamp\"] self.logger.info( f\"Time since last harvest: {(current_time", "gas fee: {self.__estimate_harvest_mta_gas(address)}\" ) return self.__build_harvest_mta_transaction(address, options) def __build_harvest_transaction( self,", "gas cost: {gas_fee}\") should_harvest_mta = self.is_profitable() self.logger.info(f\"Should we call harvestMta:", "HexBytes(0) try: tx = self.__build_transaction(strategy.address, returns=returns) signed_tx = self.web3.eth.account.sign_transaction( tx,", "if not self.__is_keeper_whitelisted(\"rewards_manager\"): raise ValueError(f\"Keeper is not whitelisted for {strategy_name}\")", "tx_hash def __build_transaction( self, address: str, returns: bool = True,", "self.web3.flashbots.send_bundle( bundle, target_block_number=block_number + i ) max_target_block = block_number +", "__send_harvest_mta_tx(self, voter_proxy: contract) -> HexBytes: \"\"\"Sends transaction 
to ETH node", "elif self.chain in [Network.Arbitrum, Network.Fantom]: gas_price = int(1.1 * self.web3.eth.gas_price)", "api to get prices currency = BASE_CURRENCIES[self.chain] if self.chain ==", "sleep import requests from hexbytes import HexBytes from web3 import", "if the bot we're using is whitelisted for the strategy.", "Raises: Exception: If we have an issue sending transaction (unable", ">= HARVEST_THRESHOLD return True def __is_keeper_whitelisted(self, function: str) -> bool:" ]
#!/usr/bin/env python
# License: Apache-2.0
# Copyright (C) 2020 <NAME>


class Column:
    """Represents a ClickHouse column.

    The constructor parameters appear to mirror fields of ClickHouse's
    ``system.columns`` table — TODO confirm against the caller that fills
    these in.  ``type`` intentionally keeps its (builtin-shadowing) name
    because renaming it would break keyword-argument callers.
    """

    def __init__(
        self,
        database: str,
        table: str,
        name: str,
        type: str,
        default_kind: str,
        default_expression: str,
        comment: str,
        compression_codec: str,
        is_in_partition_key: bool,
        is_in_sorting_key: bool,
        is_in_primary_key: bool,
        is_in_sampling_key: bool,
    ):
        self.database = database
        self.table = table
        self.name = name
        self.type = type
        self.default_kind = default_kind
        self.default_expression = default_expression
        self.comment = comment
        self.compression_codec = compression_codec
        self.is_in_partition_key = is_in_partition_key
        self.is_in_sorting_key = is_in_sorting_key
        self.is_in_primary_key = is_in_primary_key
        self.is_in_sampling_key = is_in_sampling_key

    @property
    def db_table(self) -> str:
        """Fully qualified ``database.table`` this column belongs to."""
        return "{}.{}".format(self.database, self.table)

    def __str__(self) -> str:
        # A Column prints as its bare column name.
        return self.name

    def __repr__(self) -> str:
        # Added for debuggability; no caller relied on the previous
        # default object.__repr__ output.
        return "Column({}.{})".format(self.db_table, self.name)
#!/usr/bin/env python3
"""Dump a binary file as a C array definition.

Usage: <script> FILE NAME

Writes ``char NAME[] = {...};`` followed by ``unsigned int NAME_sz = ...;``
to stdout, MAX bytes per output line.  The emitted text — including the
trailing ", " after the last byte, which the previous per-byte loop
produced on its EOF pass — is preserved byte-for-byte.
"""

import os  # unused here; kept so the module surface does not change
import sys

MAX = 8  # bytes emitted per output line

fpath = sys.argv[1]
name = sys.argv[2]

# Performance fix: read the whole file once instead of the previous
# fh.read(1)-per-byte loop; output is identical, with far fewer
# Python-level iterations and syscalls.
with open(fpath, "rb") as fh:
    data = fh.read()

write = sys.stdout.write
write("char %s[] = {" % (name,))
i = 0
# The range runs one step past the data: the original loop only noticed
# EOF *after* emitting the separator for the next element, so the extra
# pass reproduces that trailing ", " / "\n\t" exactly.
for i in range(len(data) + 1):
    if i > 0:
        write(", ")
    if i % MAX == 0:
        write("\n\t")
    if i == len(data):
        write("\n")
        break
    write("0x%.2x" % (data[i],))  # indexing bytes yields an int in Python 3

print("};")
print("")
# After the loop, i equals the number of bytes written (0 for an empty file),
# matching the original counter.
print("unsigned int %s_sz = %s;" % (name, i))
print("")
import unittest

from reactivex import operators as ops
from reactivex.testing import ReactiveTest, TestScheduler

on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created


class TestSkip(unittest.TestCase):
    """Unit tests for the ``skip`` operator.

    Every test subscribes (at virtual time 200) to the same hot
    observable of 19 ``on_next`` notifications and checks which of them
    survive ``ops.skip(n)`` for ``n`` greater than, equal to, or smaller
    than the number of post-subscription elements, under normal
    completion, an error, or an early dispose.
    """

    def _hot(self, scheduler, *terminal):
        """Build the shared hot observable, appending the given terminal
        notification(s) — on_completed / on_error — if any."""
        return scheduler.create_hot_observable(
            on_next(70, 6),
            on_next(150, 4),
            on_next(210, 9),
            on_next(230, 13),
            on_next(270, 7),
            on_next(280, 1),
            on_next(300, -1),
            on_next(310, 3),
            on_next(340, 8),
            on_next(370, 11),
            on_next(410, 15),
            on_next(415, 16),
            on_next(460, 72),
            on_next(510, 76),
            on_next(560, 32),
            on_next(570, -100),
            on_next(580, -3),
            on_next(590, 5),
            on_next(630, 10),
            *terminal,
        )

    def test_skip_complete_after(self):
        # skip count exceeds the element count: only completion passes.
        scheduler = TestScheduler()
        xs = self._hot(scheduler, on_completed(690))

        def create():
            return xs.pipe(ops.skip(20))

        results = scheduler.start(create)
        assert results.messages == [on_completed(690)]
        assert xs.subscriptions == [subscribe(200, 690)]

    def test_skip_complete_same(self):
        # skip count equals the post-subscription element count.
        scheduler = TestScheduler()
        xs = self._hot(scheduler, on_completed(690))

        def create():
            return xs.pipe(ops.skip(17))

        results = scheduler.start(create)
        assert results.messages == [on_completed(690)]
        assert xs.subscriptions == [subscribe(200, 690)]

    def test_skip_complete_before(self):
        # skip(10) lets the tail of the stream through.
        scheduler = TestScheduler()
        xs = self._hot(scheduler, on_completed(690))

        def create():
            return xs.pipe(ops.skip(10))

        results = scheduler.start(create)
        assert results.messages == [
            on_next(460, 72),
            on_next(510, 76),
            on_next(560, 32),
            on_next(570, -100),
            on_next(580, -3),
            on_next(590, 5),
            on_next(630, 10),
            on_completed(690),
        ]
        assert xs.subscriptions == [subscribe(200, 690)]

    def test_skip_complete_zero(self):
        # Renamed from test_skip_Complete_zero for consistent snake_case.
        # skip(0) forwards everything after the subscription at 200.
        scheduler = TestScheduler()
        xs = self._hot(scheduler, on_completed(690))

        def create():
            return xs.pipe(ops.skip(0))

        results = scheduler.start(create)
        assert results.messages == [
            on_next(210, 9),
            on_next(230, 13),
            on_next(270, 7),
            on_next(280, 1),
            on_next(300, -1),
            on_next(310, 3),
            on_next(340, 8),
            on_next(370, 11),
            on_next(410, 15),
            on_next(415, 16),
            on_next(460, 72),
            on_next(510, 76),
            on_next(560, 32),
            on_next(570, -100),
            on_next(580, -3),
            on_next(590, 5),
            on_next(630, 10),
            on_completed(690),
        ]
        assert xs.subscriptions == [subscribe(200, 690)]

    def test_skip_error_after(self):
        # skip count exceeds the element count: only the error passes.
        ex = "ex"
        scheduler = TestScheduler()
        xs = self._hot(scheduler, on_error(690, ex))

        def create():
            return xs.pipe(ops.skip(20))

        results = scheduler.start(create)
        assert results.messages == [on_error(690, ex)]
        assert xs.subscriptions == [subscribe(200, 690)]

    def test_skip_error_same(self):
        # skip count equals the element count: only the error passes.
        ex = "ex"
        scheduler = TestScheduler()
        xs = self._hot(scheduler, on_error(690, ex))

        def create():
            return xs.pipe(ops.skip(17))

        results = scheduler.start(create)
        assert results.messages == [on_error(690, ex)]
        assert xs.subscriptions == [subscribe(200, 690)]

    def test_skip_error_before(self):
        # skip(3): the remaining elements and then the error are forwarded.
        ex = "ex"
        scheduler = TestScheduler()
        xs = self._hot(scheduler, on_error(690, ex))

        def create():
            return xs.pipe(ops.skip(3))

        results = scheduler.start(create)
        assert results.messages == [
            on_next(280, 1),
            on_next(300, -1),
            on_next(310, 3),
            on_next(340, 8),
            on_next(370, 11),
            on_next(410, 15),
            on_next(415, 16),
            on_next(460, 72),
            on_next(510, 76),
            on_next(560, 32),
            on_next(570, -100),
            on_next(580, -3),
            on_next(590, 5),
            on_next(630, 10),
            on_error(690, ex),
        ]
        assert xs.subscriptions == [subscribe(200, 690)]

    def test_skip_dispose_before(self):
        # Disposed at 250, before skip(3) has let anything through.
        scheduler = TestScheduler()
        xs = self._hot(scheduler)

        def create():
            return xs.pipe(ops.skip(3))

        results = scheduler.start(create, disposed=250)
        assert results.messages == []
        assert xs.subscriptions == [subscribe(200, 250)]

    def test_skip_dispose_after(self):
        # Disposed at 400: only elements emitted in (skip window, 400).
        scheduler = TestScheduler()
        xs = self._hot(scheduler)

        def create():
            return xs.pipe(ops.skip(3))

        results = scheduler.start(create, disposed=400)
        assert results.messages == [
            on_next(280, 1),
            on_next(300, -1),
            on_next(310, 3),
            on_next(340, 8),
            on_next(370, 11),
        ]
        assert xs.subscriptions == [subscribe(200, 400)]


if __name__ == "__main__":
    unittest.main()
== [subscribe(200, 690)]", "= scheduler.start(create) assert results.messages == [ on_next(210, 9), on_next(230, 13),", "TestScheduler on_next = ReactiveTest.on_next on_completed = ReactiveTest.on_completed on_error = ReactiveTest.on_error", "on_next(370, 11), ] assert xs.subscriptions == [subscribe(200, 400)] if __name__", "on_next(580, -3), on_next(590, 5), on_next(630, 10), ) def create(): return", "assert xs.subscriptions == [subscribe(200, 400)] if __name__ == \"__main__\": unittest.main()", "TestScheduler() xs = scheduler.create_hot_observable( on_next(70, 6), on_next(150, 4), on_next(210, 9),", "scheduler.create_hot_observable( on_next(70, 6), on_next(150, 4), on_next(210, 9), on_next(230, 13), on_next(270,", "1), on_next(300, -1), on_next(310, 3), on_next(340, 8), on_next(370, 11), ]", "1), on_next(300, -1), on_next(310, 3), on_next(340, 8), on_next(370, 11), on_next(410,", "created = ReactiveTest.created class TestSkip(unittest.TestCase): def test_skip_complete_after(self): scheduler = TestScheduler()", "results = scheduler.start(create) assert results.messages == [ on_next(280, 1), on_next(300,", "690)] def test_skip_Complete_zero(self): scheduler = TestScheduler() xs = scheduler.create_hot_observable( on_next(70,", "assert xs.subscriptions == [subscribe(200, 690)] def test_skip_error_same(self): ex = \"ex\"", "create(): return xs.pipe(ops.skip(3)) results = scheduler.start(create) assert results.messages == [", "assert results.messages == [ on_next(280, 1), on_next(300, -1), on_next(310, 3),", "def create(): return xs.pipe(ops.skip(3)) results = scheduler.start(create, disposed=250) assert results.messages", "test_skip_complete_after(self): scheduler = TestScheduler() xs = scheduler.create_hot_observable( on_next(70, 6), on_next(150,", "on_next(630, 10), ) def create(): return xs.pipe(ops.skip(3)) results = scheduler.start(create,", "xs.pipe(ops.skip(3)) results = scheduler.start(create) assert results.messages == [ on_next(280, 1),", 
"xs.pipe(ops.skip(20)) results = scheduler.start(create) assert results.messages == [on_error(690, ex)] assert", "[ on_next(280, 1), on_next(300, -1), on_next(310, 3), on_next(340, 8), on_next(370,", "results.messages == [on_completed(690)] assert xs.subscriptions == [subscribe(200, 690)] def test_skip_complete_before(self):", "on_next(570, -100), on_next(580, -3), on_next(590, 5), on_next(630, 10), on_error(690, ex),", "[ on_next(460, 72), on_next(510, 76), on_next(560, 32), on_next(570, -100), on_next(580,", "create(): return xs.pipe(ops.skip(0)) results = scheduler.start(create) assert results.messages == [", "ops from reactivex.testing import ReactiveTest, TestScheduler on_next = ReactiveTest.on_next on_completed", "[on_error(690, ex)] assert xs.subscriptions == [subscribe(200, 690)] def test_skip_error_same(self): ex", "xs = scheduler.create_hot_observable( on_next(70, 6), on_next(150, 4), on_next(210, 9), on_next(230,", "= ReactiveTest.subscribed disposed = ReactiveTest.disposed created = ReactiveTest.created class TestSkip(unittest.TestCase):", "= ReactiveTest.disposed created = ReactiveTest.created class TestSkip(unittest.TestCase): def test_skip_complete_after(self): scheduler", "disposed=250) assert results.messages == [] assert xs.subscriptions == [subscribe(200, 250)]", "return xs.pipe(ops.skip(3)) results = scheduler.start(create, disposed=400) assert results.messages == [", "assert results.messages == [on_completed(690)] assert xs.subscriptions == [subscribe(200, 690)] def", "xs.pipe(ops.skip(10)) results = scheduler.start(create) assert results.messages == [ on_next(460, 72),", "from reactivex import operators as ops from reactivex.testing import ReactiveTest,", "test_skip_error_before(self): ex = \"ex\" scheduler = TestScheduler() xs = scheduler.create_hot_observable(", "def test_skip_dispose_after(self): scheduler = TestScheduler() xs = scheduler.create_hot_observable( on_next(70, 6),", "on_next(310, 3), on_next(340, 8), on_next(370, 11), 
on_next(410, 15), on_next(415, 16),", ") def create(): return xs.pipe(ops.skip(17)) results = scheduler.start(create) assert results.messages", "72), on_next(510, 76), on_next(560, 32), on_next(570, -100), on_next(580, -3), on_next(590,", "] assert xs.subscriptions == [subscribe(200, 690)] def test_skip_Complete_zero(self): scheduler =", ") def create(): return xs.pipe(ops.skip(3)) results = scheduler.start(create) assert results.messages", "-3), on_next(590, 5), on_next(630, 10), on_error(690, ex), ] assert xs.subscriptions", "xs.pipe(ops.skip(20)) results = scheduler.start(create) assert results.messages == [on_completed(690)] assert xs.subscriptions", "on_completed(690), ] assert xs.subscriptions == [subscribe(200, 690)] def test_skip_error_after(self): ex", "assert results.messages == [on_error(690, ex)] assert xs.subscriptions == [subscribe(200, 690)]", "xs.subscriptions == [subscribe(200, 690)] def test_skip_complete_same(self): scheduler = TestScheduler() xs", "ReactiveTest, TestScheduler on_next = ReactiveTest.on_next on_completed = ReactiveTest.on_completed on_error =", "-3), on_next(590, 5), on_next(630, 10), ) def create(): return xs.pipe(ops.skip(3))", "on_next(340, 8), on_next(370, 11), on_next(410, 15), on_next(415, 16), on_next(460, 72),", "9), on_next(230, 13), on_next(270, 7), on_next(280, 1), on_next(300, -1), on_next(310,", "on_completed(690), ) def create(): return xs.pipe(ops.skip(0)) results = scheduler.start(create) assert", "690)] def test_skip_error_same(self): ex = \"ex\" scheduler = TestScheduler() xs", "create(): return xs.pipe(ops.skip(3)) results = scheduler.start(create, disposed=400) assert results.messages ==", "create(): return xs.pipe(ops.skip(20)) results = scheduler.start(create) assert results.messages == [on_completed(690)]", "= ReactiveTest.on_next on_completed = ReactiveTest.on_completed on_error = ReactiveTest.on_error subscribe =", "def test_skip_complete_before(self): scheduler = TestScheduler() xs = 
scheduler.create_hot_observable( on_next(70, 6),", "on_next(570, -100), on_next(580, -3), on_next(590, 5), on_next(630, 10), ) def", "10), on_completed(690), ) def create(): return xs.pipe(ops.skip(0)) results = scheduler.start(create)", "on_next(230, 13), on_next(270, 7), on_next(280, 1), on_next(300, -1), on_next(310, 3),", "ReactiveTest.subscribe subscribed = ReactiveTest.subscribed disposed = ReactiveTest.disposed created = ReactiveTest.created", "10), on_error(690, ex), ) def create(): return xs.pipe(ops.skip(17)) results =", "[subscribe(200, 690)] def test_skip_complete_same(self): scheduler = TestScheduler() xs = scheduler.create_hot_observable(", "10), on_error(690, ex), ] assert xs.subscriptions == [subscribe(200, 690)] def", "ex = \"ex\" scheduler = TestScheduler() xs = scheduler.create_hot_observable( on_next(70,", "on_next(570, -100), on_next(580, -3), on_next(590, 5), on_next(630, 10), on_completed(690), ]", "32), on_next(570, -100), on_next(580, -3), on_next(590, 5), on_next(630, 10), on_error(690,", "10), on_error(690, ex), ) def create(): return xs.pipe(ops.skip(20)) results =", "xs.subscriptions == [subscribe(200, 690)] def test_skip_Complete_zero(self): scheduler = TestScheduler() xs", "disposed=400) assert results.messages == [ on_next(280, 1), on_next(300, -1), on_next(310,", "32), on_next(570, -100), on_next(580, -3), on_next(590, 5), on_next(630, 10), on_completed(690),", "xs.pipe(ops.skip(3)) results = scheduler.start(create, disposed=400) assert results.messages == [ on_next(280,", "return xs.pipe(ops.skip(17)) results = scheduler.start(create) assert results.messages == [on_completed(690)] assert", "subscribed = ReactiveTest.subscribed disposed = ReactiveTest.disposed created = ReactiveTest.created class", "ReactiveTest.on_next on_completed = ReactiveTest.on_completed on_error = ReactiveTest.on_error subscribe = ReactiveTest.subscribe", "xs.subscriptions == [subscribe(200, 690)] def test_skip_error_before(self): ex = \"ex\" scheduler", 
"on_next(630, 10), on_error(690, ex), ) def create(): return xs.pipe(ops.skip(3)) results", "6), on_next(150, 4), on_next(210, 9), on_next(230, 13), on_next(270, 7), on_next(280,", "5), on_next(630, 10), on_error(690, ex), ) def create(): return xs.pipe(ops.skip(17))", "xs.pipe(ops.skip(3)) results = scheduler.start(create, disposed=250) assert results.messages == [] assert", "[subscribe(200, 690)] def test_skip_complete_before(self): scheduler = TestScheduler() xs = scheduler.create_hot_observable(", "xs.pipe(ops.skip(0)) results = scheduler.start(create) assert results.messages == [ on_next(210, 9),", "scheduler.start(create) assert results.messages == [ on_next(460, 72), on_next(510, 76), on_next(560,", "on_next(590, 5), on_next(630, 10), on_completed(690), ) def create(): return xs.pipe(ops.skip(20))" ]
[ "range(T): n = int(input()) red = input() blue = input()", "+1 elif int(red[i]) < int(blue[i]): bcount = bcount + 1", "== \"__main__\": T = int(input()) for t in range(T): n", "in range(T): n = int(input()) red = input() blue =", "= int(input()) for t in range(T): n = int(input()) red", "blue): rcount = bcount = 0 for i in range(n):", "> int(blue[i]): rcount = rcount +1 elif int(red[i]) < int(blue[i]):", "= 0 for i in range(n): if int(red[i]) > int(blue[i]):", "else ('BLUE' if bcount>rcount else 'EQUAL')) if __name__ == \"__main__\":", "if __name__ == \"__main__\": T = int(input()) for t in", "T = int(input()) for t in range(T): n = int(input())", "def solve(n, red , blue): rcount = bcount = 0", "if bcount>rcount else 'EQUAL')) if __name__ == \"__main__\": T =", "+ 1 print( 'RED' if rcount>bcount else ('BLUE' if bcount>rcount", "int(input()) red = input() blue = input() solve(n, red, blue)", "else 'EQUAL')) if __name__ == \"__main__\": T = int(input()) for", "bcount>rcount else 'EQUAL')) if __name__ == \"__main__\": T = int(input())", "bcount = 0 for i in range(n): if int(red[i]) >", "range(n): if int(red[i]) > int(blue[i]): rcount = rcount +1 elif", "'RED' if rcount>bcount else ('BLUE' if bcount>rcount else 'EQUAL')) if", "solve(n, red , blue): rcount = bcount = 0 for", "for i in range(n): if int(red[i]) > int(blue[i]): rcount =", "'EQUAL')) if __name__ == \"__main__\": T = int(input()) for t", "int(red[i]) < int(blue[i]): bcount = bcount + 1 print( 'RED'", "elif int(red[i]) < int(blue[i]): bcount = bcount + 1 print(", "int(red[i]) > int(blue[i]): rcount = rcount +1 elif int(red[i]) <", "i in range(n): if int(red[i]) > int(blue[i]): rcount = rcount", "bcount = bcount + 1 print( 'RED' if rcount>bcount else", "print( 'RED' if rcount>bcount else ('BLUE' if bcount>rcount else 'EQUAL'))", "= bcount + 1 print( 'RED' if rcount>bcount else ('BLUE'", "for t in range(T): n = int(input()) red = input()", "int(blue[i]): rcount = rcount +1 elif int(red[i]) < int(blue[i]): 
bcount", "< int(blue[i]): bcount = bcount + 1 print( 'RED' if", "0 for i in range(n): if int(red[i]) > int(blue[i]): rcount", "= bcount = 0 for i in range(n): if int(red[i])", "bcount + 1 print( 'RED' if rcount>bcount else ('BLUE' if", "in range(n): if int(red[i]) > int(blue[i]): rcount = rcount +1", "rcount>bcount else ('BLUE' if bcount>rcount else 'EQUAL')) if __name__ ==", "\"__main__\": T = int(input()) for t in range(T): n =", "1 print( 'RED' if rcount>bcount else ('BLUE' if bcount>rcount else", "n = int(input()) red = input() blue = input() solve(n,", "rcount = bcount = 0 for i in range(n): if", "int(input()) for t in range(T): n = int(input()) red =", "int(blue[i]): bcount = bcount + 1 print( 'RED' if rcount>bcount", "rcount +1 elif int(red[i]) < int(blue[i]): bcount = bcount +", "t in range(T): n = int(input()) red = input() blue", "red , blue): rcount = bcount = 0 for i", "if rcount>bcount else ('BLUE' if bcount>rcount else 'EQUAL')) if __name__", "__name__ == \"__main__\": T = int(input()) for t in range(T):", "= rcount +1 elif int(red[i]) < int(blue[i]): bcount = bcount", "rcount = rcount +1 elif int(red[i]) < int(blue[i]): bcount =", ", blue): rcount = bcount = 0 for i in", "('BLUE' if bcount>rcount else 'EQUAL')) if __name__ == \"__main__\": T", "if int(red[i]) > int(blue[i]): rcount = rcount +1 elif int(red[i])", "= int(input()) red = input() blue = input() solve(n, red," ]
[ "queue import shudder.metadata as metadata from shudder.config import CONFIG import", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "import signal import subprocess import sys if __name__ == '__main__':", "signum = getattr(signal,i) signal.signal(signum, receive_signal) while True: message = queue.poll_queue(sqs_connection,", "time.sleep(30) \"\"\"Send a heart beat to aws\"\"\" queue.record_lifecycle_action_heartbeat(message) \"\"\"Send a", "under the License. \"\"\"Start polling of SQS and metadata.\"\"\" import", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "permissions and # limitations under the License. \"\"\"Start polling of", "receive_signal(signum, stack): if signum in [1, 2, 3, 15]: print", "= ['SIG_DFL','SIGSTOP','SIGKILL'] for i in [x for x in dir(signal)", "dir(signal) if x.startswith(\"SIG\")]: if not i in uncatchable: signum =", "import requests import signal import subprocess import sys if __name__", "message or metadata.poll_instance_metadata(): queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) if 'endpoint' in CONFIG:", "'commands' in CONFIG: for command in CONFIG[\"commands\"]: print 'Running command:", "if 'commands' in CONFIG: for command in CONFIG[\"commands\"]: print 'Running", "distributed under the License is distributed on an \"AS IS\"", "print 'Caught signal %s, exiting.' 
% (str(signum)) queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue)", "CONFIG[\"endpoints\"]: requests.get(endpoint) if 'commands' in CONFIG: for command in CONFIG[\"commands\"]:", "\"\"\"Start polling of SQS and metadata.\"\"\" import shudder.queue as queue", "is None: time.sleep(30) \"\"\"Send a heart beat to aws\"\"\" queue.record_lifecycle_action_heartbeat(message)", "in uncatchable: signum = getattr(signal,i) signal.signal(signum, receive_signal) while True: message", "the specific language governing permissions and # limitations under the", "i in uncatchable: signum = getattr(signal,i) signal.signal(signum, receive_signal) while True:", "% command process = subprocess.Popen(command) while process.poll() is None: time.sleep(30)", "as queue import shudder.metadata as metadata from shudder.config import CONFIG", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "shudder.metadata as metadata from shudder.config import CONFIG import time import", "and metadata.\"\"\" import shudder.queue as queue import shudder.metadata as metadata", "os import requests import signal import subprocess import sys if", "uncatchable: signum = getattr(signal,i) signal.signal(signum, receive_signal) while True: message =", "command process = subprocess.Popen(command) while process.poll() is None: time.sleep(30) \"\"\"Send", "== '__main__': sqs_connection, sqs_queue = queue.create_queue() sns_connection, subscription_arn = queue.subscribe_sns(sqs_queue)", "import CONFIG import time import os import requests import signal", "sqs_connection, sqs_queue = queue.create_queue() sns_connection, subscription_arn = queue.subscribe_sns(sqs_queue) def receive_signal(signum,", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "except in compliance with the License. 
# You may obtain", "if 'endpoints' in CONFIG: for endpoint in CONFIG[\"endpoints\"]: requests.get(endpoint) if", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "signal import subprocess import sys if __name__ == '__main__': sqs_connection,", "in [1, 2, 3, 15]: print 'Caught signal %s, exiting.'", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "if not i in uncatchable: signum = getattr(signal,i) signal.signal(signum, receive_signal)", "['SIG_DFL','SIGSTOP','SIGKILL'] for i in [x for x in dir(signal) if", "__name__ == '__main__': sqs_connection, sqs_queue = queue.create_queue() sns_connection, subscription_arn =", "writing, software # distributed under the License is distributed on", "# limitations under the License. \"\"\"Start polling of SQS and", "in writing, software # distributed under the License is distributed", "def receive_signal(signum, stack): if signum in [1, 2, 3, 15]:", "import shudder.queue as queue import shudder.metadata as metadata from shudder.config", "x.startswith(\"SIG\")]: if not i in uncatchable: signum = getattr(signal,i) signal.signal(signum,", "requests import signal import subprocess import sys if __name__ ==", "you may not use this file except in compliance with", "message = queue.poll_queue(sqs_connection, sqs_queue) if message or metadata.poll_instance_metadata(): queue.clean_up_sns(sns_connection, subscription_arn,", "[1, 2, 3, 15]: print 'Caught signal %s, exiting.' 
%", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "as metadata from shudder.config import CONFIG import time import os", "in [x for x in dir(signal) if x.startswith(\"SIG\")]: if not", "metadata.\"\"\" import shudder.queue as queue import shudder.metadata as metadata from", "not i in uncatchable: signum = getattr(signal,i) signal.signal(signum, receive_signal) while", "(str(signum)) queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) sys.exit() else: print 'Caught signal %s,", "else: print 'Caught signal %s, ignoring.' % (str(signum)) uncatchable =", "use this file except in compliance with the License. #", "% (str(signum)) uncatchable = ['SIG_DFL','SIGSTOP','SIGKILL'] for i in [x for", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "Copyright 2014 Scopely, Inc. # # Licensed under the Apache", "or metadata.poll_instance_metadata(): queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) if 'endpoint' in CONFIG: requests.get(CONFIG[\"endpoint\"])", "while process.poll() is None: time.sleep(30) \"\"\"Send a heart beat to", "subscription_arn, sqs_queue) if 'endpoint' in CONFIG: requests.get(CONFIG[\"endpoint\"]) if 'endpoints' in", "signal %s, exiting.' % (str(signum)) queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) sys.exit() else:", "= queue.subscribe_sns(sqs_queue) def receive_signal(signum, stack): if signum in [1, 2,", "print 'Caught signal %s, ignoring.' % (str(signum)) uncatchable = ['SIG_DFL','SIGSTOP','SIGKILL']", "if 'endpoint' in CONFIG: requests.get(CONFIG[\"endpoint\"]) if 'endpoints' in CONFIG: for", "CONDITIONS OF ANY KIND, either express or implied. # See", "uncatchable = ['SIG_DFL','SIGSTOP','SIGKILL'] for i in [x for x in", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "% (str(signum)) queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) sys.exit() else: print 'Caught signal", "or implied. 
# See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "sys.exit() else: print 'Caught signal %s, ignoring.' % (str(signum)) uncatchable", "queue.poll_queue(sqs_connection, sqs_queue) if message or metadata.poll_instance_metadata(): queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) if", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "polling of SQS and metadata.\"\"\" import shudder.queue as queue import", "queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) if 'endpoint' in CONFIG: requests.get(CONFIG[\"endpoint\"]) if 'endpoints'", "while True: message = queue.poll_queue(sqs_connection, sqs_queue) if message or metadata.poll_instance_metadata():", "# You may obtain a copy of the License at", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "signal %s, ignoring.' 
% (str(signum)) uncatchable = ['SIG_DFL','SIGSTOP','SIGKILL'] for i", "None: time.sleep(30) \"\"\"Send a heart beat to aws\"\"\" queue.record_lifecycle_action_heartbeat(message) \"\"\"Send", "sys if __name__ == '__main__': sqs_connection, sqs_queue = queue.create_queue() sns_connection,", "under the License is distributed on an \"AS IS\" BASIS,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "License for the specific language governing permissions and # limitations", "queue.subscribe_sns(sqs_queue) def receive_signal(signum, stack): if signum in [1, 2, 3,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "SQS and metadata.\"\"\" import shudder.queue as queue import shudder.metadata as", "CONFIG import time import os import requests import signal import", "import time import os import requests import signal import subprocess", "True: message = queue.poll_queue(sqs_connection, sqs_queue) if message or metadata.poll_instance_metadata(): queue.clean_up_sns(sns_connection,", "for i in [x for x in dir(signal) if x.startswith(\"SIG\")]:", "queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) sys.exit() else: print 'Caught signal %s, ignoring.'", "= getattr(signal,i) signal.signal(signum, receive_signal) while True: message = queue.poll_queue(sqs_connection, sqs_queue)", "Scopely, Inc. # # Licensed under the Apache License, Version", "from shudder.config import CONFIG import time import os import requests", "sqs_queue) if 'endpoint' in CONFIG: requests.get(CONFIG[\"endpoint\"]) if 'endpoints' in CONFIG:", "governing permissions and # limitations under the License. 
\"\"\"Start polling", "shudder.queue as queue import shudder.metadata as metadata from shudder.config import", "\"\"\"Send a heart beat to aws\"\"\" queue.record_lifecycle_action_heartbeat(message) \"\"\"Send a complete", "the License for the specific language governing permissions and #", "import sys if __name__ == '__main__': sqs_connection, sqs_queue = queue.create_queue()", "import os import requests import signal import subprocess import sys", "(the \"License\"); # you may not use this file except", "[x for x in dir(signal) if x.startswith(\"SIG\")]: if not i", "Apache License, Version 2.0 (the \"License\"); # you may not", "getattr(signal,i) signal.signal(signum, receive_signal) while True: message = queue.poll_queue(sqs_connection, sqs_queue) if", "# you may not use this file except in compliance", "either express or implied. # See the License for the", "ignoring.' % (str(signum)) uncatchable = ['SIG_DFL','SIGSTOP','SIGKILL'] for i in [x", "beat to aws\"\"\" queue.record_lifecycle_action_heartbeat(message) \"\"\"Send a complete lifecycle action\"\"\" queue.complete_lifecycle_action(message)", "OR CONDITIONS OF ANY KIND, either express or implied. #", "exiting.' % (str(signum)) queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) sys.exit() else: print 'Caught", "sqs_queue) sys.exit() else: print 'Caught signal %s, ignoring.' % (str(signum))", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "the License is distributed on an \"AS IS\" BASIS, #", "in compliance with the License. # You may obtain a", "and # limitations under the License. 
\"\"\"Start polling of SQS", "software # distributed under the License is distributed on an", "if __name__ == '__main__': sqs_connection, sqs_queue = queue.create_queue() sns_connection, subscription_arn", "(str(signum)) uncatchable = ['SIG_DFL','SIGSTOP','SIGKILL'] for i in [x for x", "# # Unless required by applicable law or agreed to", "15]: print 'Caught signal %s, exiting.' % (str(signum)) queue.clean_up_sns(sns_connection, subscription_arn,", "CONFIG[\"commands\"]: print 'Running command: %s' % command process = subprocess.Popen(command)", "in CONFIG[\"commands\"]: print 'Running command: %s' % command process =", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "# Copyright 2014 Scopely, Inc. # # Licensed under the", "'Caught signal %s, exiting.' % (str(signum)) queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) sys.exit()", "CONFIG: requests.get(CONFIG[\"endpoint\"]) if 'endpoints' in CONFIG: for endpoint in CONFIG[\"endpoints\"]:", "sqs_queue) if message or metadata.poll_instance_metadata(): queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) if 'endpoint'", "Version 2.0 (the \"License\"); # you may not use this", "for endpoint in CONFIG[\"endpoints\"]: requests.get(endpoint) if 'commands' in CONFIG: for", "command: %s' % command process = subprocess.Popen(command) while process.poll() is", "if message or metadata.poll_instance_metadata(): queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) if 'endpoint' in", "signal.signal(signum, receive_signal) while True: message = queue.poll_queue(sqs_connection, sqs_queue) if message", "'endpoints' in CONFIG: for endpoint in CONFIG[\"endpoints\"]: requests.get(endpoint) if 'commands'", "shudder.config import CONFIG import time import os import requests import", "sns_connection, subscription_arn = queue.subscribe_sns(sqs_queue) def receive_signal(signum, stack): if signum 
in", "law or agreed to in writing, software # distributed under", "in CONFIG[\"endpoints\"]: requests.get(endpoint) if 'commands' in CONFIG: for command in", "i in [x for x in dir(signal) if x.startswith(\"SIG\")]: if", "sqs_queue = queue.create_queue() sns_connection, subscription_arn = queue.subscribe_sns(sqs_queue) def receive_signal(signum, stack):", "'Running command: %s' % command process = subprocess.Popen(command) while process.poll()", "heart beat to aws\"\"\" queue.record_lifecycle_action_heartbeat(message) \"\"\"Send a complete lifecycle action\"\"\"", "CONFIG: for endpoint in CONFIG[\"endpoints\"]: requests.get(endpoint) if 'commands' in CONFIG:", "License. \"\"\"Start polling of SQS and metadata.\"\"\" import shudder.queue as", "implied. # See the License for the specific language governing", "'__main__': sqs_connection, sqs_queue = queue.create_queue() sns_connection, subscription_arn = queue.subscribe_sns(sqs_queue) def", "signum in [1, 2, 3, 15]: print 'Caught signal %s,", "endpoint in CONFIG[\"endpoints\"]: requests.get(endpoint) if 'commands' in CONFIG: for command", "aws\"\"\" queue.record_lifecycle_action_heartbeat(message) \"\"\"Send a complete lifecycle action\"\"\" queue.complete_lifecycle_action(message) sys.exit(0) time.sleep(5)", "under the Apache License, Version 2.0 (the \"License\"); # you", "limitations under the License. \"\"\"Start polling of SQS and metadata.\"\"\"", "\"License\"); # you may not use this file except in", "'Caught signal %s, ignoring.' % (str(signum)) uncatchable = ['SIG_DFL','SIGSTOP','SIGKILL'] for", "requests.get(endpoint) if 'commands' in CONFIG: for command in CONFIG[\"commands\"]: print", "the License. 
\"\"\"Start polling of SQS and metadata.\"\"\" import shudder.queue", "in dir(signal) if x.startswith(\"SIG\")]: if not i in uncatchable: signum", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "in CONFIG: for endpoint in CONFIG[\"endpoints\"]: requests.get(endpoint) if 'commands' in", "process = subprocess.Popen(command) while process.poll() is None: time.sleep(30) \"\"\"Send a", "stack): if signum in [1, 2, 3, 15]: print 'Caught", "metadata.poll_instance_metadata(): queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) if 'endpoint' in CONFIG: requests.get(CONFIG[\"endpoint\"]) if", "3, 15]: print 'Caught signal %s, exiting.' % (str(signum)) queue.clean_up_sns(sns_connection,", "queue.create_queue() sns_connection, subscription_arn = queue.subscribe_sns(sqs_queue) def receive_signal(signum, stack): if signum", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "x in dir(signal) if x.startswith(\"SIG\")]: if not i in uncatchable:", "subprocess.Popen(command) while process.poll() is None: time.sleep(30) \"\"\"Send a heart beat", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "= queue.create_queue() sns_connection, subscription_arn = queue.subscribe_sns(sqs_queue) def receive_signal(signum, stack): if", "'endpoint' in CONFIG: requests.get(CONFIG[\"endpoint\"]) if 'endpoints' in CONFIG: for endpoint", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "2, 3, 15]: print 'Caught signal %s, exiting.' % (str(signum))", "to in writing, software # distributed under the License is", "%s, exiting.' % (str(signum)) queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue) sys.exit() else: print", "%s, ignoring.' % (str(signum)) uncatchable = ['SIG_DFL','SIGSTOP','SIGKILL'] for i in", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Inc. # # Licensed under the Apache License, Version 2.0", "# See the License for the specific language governing permissions", "receive_signal) while True: message = queue.poll_queue(sqs_connection, sqs_queue) if message or", "requests.get(CONFIG[\"endpoint\"]) if 'endpoints' in CONFIG: for endpoint in CONFIG[\"endpoints\"]: requests.get(endpoint)", "in CONFIG: for command in CONFIG[\"commands\"]: print 'Running command: %s'", "2014 Scopely, Inc. # # Licensed under the Apache License,", "for command in CONFIG[\"commands\"]: print 'Running command: %s' % command", "You may obtain a copy of the License at #", "metadata from shudder.config import CONFIG import time import os import", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "if x.startswith(\"SIG\")]: if not i in uncatchable: signum = getattr(signal,i)", "required by applicable law or agreed to in writing, software", "%s' % command process = subprocess.Popen(command) while process.poll() is None:", "language governing permissions and # limitations under the License. 
\"\"\"Start", "for x in dir(signal) if x.startswith(\"SIG\")]: if not i in", "= subprocess.Popen(command) while process.poll() is None: time.sleep(30) \"\"\"Send a heart", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "if signum in [1, 2, 3, 15]: print 'Caught signal", "with the License. # You may obtain a copy of", "to aws\"\"\" queue.record_lifecycle_action_heartbeat(message) \"\"\"Send a complete lifecycle action\"\"\" queue.complete_lifecycle_action(message) sys.exit(0)", "this file except in compliance with the License. # You", "time import os import requests import signal import subprocess import", "print 'Running command: %s' % command process = subprocess.Popen(command) while", "the Apache License, Version 2.0 (the \"License\"); # you may", "subprocess import sys if __name__ == '__main__': sqs_connection, sqs_queue =", "in CONFIG: requests.get(CONFIG[\"endpoint\"]) if 'endpoints' in CONFIG: for endpoint in", "command in CONFIG[\"commands\"]: print 'Running command: %s' % command process", "CONFIG: for command in CONFIG[\"commands\"]: print 'Running command: %s' %", "a heart beat to aws\"\"\" queue.record_lifecycle_action_heartbeat(message) \"\"\"Send a complete lifecycle", "import shudder.metadata as metadata from shudder.config import CONFIG import time", "= queue.poll_queue(sqs_connection, sqs_queue) if message or metadata.poll_instance_metadata(): queue.clean_up_sns(sns_connection, subscription_arn, sqs_queue)", "subscription_arn, sqs_queue) sys.exit() else: print 'Caught signal %s, ignoring.' %", "of SQS and metadata.\"\"\" import shudder.queue as queue import shudder.metadata", "process.poll() is None: time.sleep(30) \"\"\"Send a heart beat to aws\"\"\"", "import subprocess import sys if __name__ == '__main__': sqs_connection, sqs_queue", "subscription_arn = queue.subscribe_sns(sqs_queue) def receive_signal(signum, stack): if signum in [1," ]
[ "1. / r return rm1.reshape(-1, 1) def electronic_potential(self, pos): return", "= [1.] pos, e, s = vmc.single_point(opt_param) print('Energy : ',", "/ r return rm1.reshape(-1, 1) def electronic_potential(self, pos): return 0", "1)) return 2*np.exp(-beta*r).reshape(-1, 1) def nuclear_potential(self, pos): r = np.sqrt(np.sum(pos**2,", "VMS solver vmc = VMC(wf=wf, sampler=sampler, optimizer=optimizer) # single point", "np.sqrt(np.sum(pos**2, 1)) rm1 = - 1. / r return rm1.reshape(-1,", "__name__ == \"__main__\": wf = Hydrogen(nelec=1, ndim=3) sampler = Metropolis(nwalkers=1000,", "point opt_param = [1.] pos, e, s = vmc.single_point(opt_param) print('Energy", "tol=1E-4) # VMS solver vmc = VMC(wf=wf, sampler=sampler, optimizer=optimizer) #", "values of psi \"\"\" beta = parameters[0] if pos.ndim ==", "= pos.reshape(1, -1) r = np.sqrt(np.sum(pos**2, 1)) return 2*np.exp(-beta*r).reshape(-1, 1)", "wave function. Args: parameters : parameters of th wf x:", "\"\"\" beta = parameters[0] if pos.ndim == 1: pos =", "print('Variance : ', s) vmc.plot_density(pos) # optimization init_param = [0.5]", "pyCHAMP.optimizer.minimize import Minimize from pyCHAMP.sampler.metropolis import Metropolis from pyCHAMP.sampler.hamiltonian import", ": ', s) vmc.plot_density(pos) # optimization init_param = [0.5] vmc.optimize(init_param)", "sampler=sampler, optimizer=optimizer) # single point opt_param = [1.] pos, e,", "nelec=1, ndim=3, domain={'min': -5, 'max': 5}) sampler = Hamiltonian(nwalkers=1000, nstep=1000,", "Minimize from pyCHAMP.sampler.metropolis import Metropolis from pyCHAMP.sampler.hamiltonian import Hamiltonian from", "electronic_potential(self, pos): return 0 if __name__ == \"__main__\": wf =", "= Metropolis(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3, domain={'min': -5, 'max': 5})", "nstep=1000, step_size=3, nelec=1, ndim=3, domain={'min': -5, 'max': 5}) sampler =", "\"\"\" Compute the value of the wave function. Args: parameters", "value of the wave function. 
Args: parameters : parameters of", "solver vmc = VMC(wf=wf, sampler=sampler, optimizer=optimizer) # single point opt_param", "class Hydrogen(WF): def __init__(self, nelec, ndim): WF.__init__(self, nelec, ndim) def", "return rm1.reshape(-1, 1) def electronic_potential(self, pos): return 0 if __name__", "rm1.reshape(-1, 1) def electronic_potential(self, pos): return 0 if __name__ ==", "def __init__(self, nelec, ndim): WF.__init__(self, nelec, ndim) def values(self, parameters,", "nstep=1000, step_size=3, nelec=1, ndim=3) optimizer = Minimize(method='bfgs', maxiter=25, tol=1E-4) #", "pos, e, s = vmc.single_point(opt_param) print('Energy : ', e) print('Variance", "from pyCHAMP.optimizer.minimize import Minimize from pyCHAMP.sampler.metropolis import Metropolis from pyCHAMP.sampler.hamiltonian", "[1.] pos, e, s = vmc.single_point(opt_param) print('Energy : ', e)", ": parameters of th wf x: position of the electron", "VMC(wf=wf, sampler=sampler, optimizer=optimizer) # single point opt_param = [1.] pos,", "pos.reshape(1, -1) r = np.sqrt(np.sum(pos**2, 1)) return 2*np.exp(-beta*r).reshape(-1, 1) def", "\"__main__\": wf = Hydrogen(nelec=1, ndim=3) sampler = Metropolis(nwalkers=1000, nstep=1000, step_size=3,", ": ', e) print('Variance : ', s) vmc.plot_density(pos) # optimization", "e) print('Variance : ', s) vmc.plot_density(pos) # optimization init_param =", "single point opt_param = [1.] pos, e, s = vmc.single_point(opt_param)", "of th wf x: position of the electron Returns: values", "pos.ndim == 1: pos = pos.reshape(1, -1) r = np.sqrt(np.sum(pos**2,", "vmc = VMC(wf=wf, sampler=sampler, optimizer=optimizer) # single point opt_param =", "step_size=3, nelec=1, ndim=3, domain={'min': -5, 'max': 5}) sampler = Hamiltonian(nwalkers=1000,", "1) def electronic_potential(self, pos): return 0 if __name__ == \"__main__\":", "# single point opt_param = [1.] 
pos, e, s =", "ndim): WF.__init__(self, nelec, ndim) def values(self, parameters, pos): \"\"\" Compute", "pyCHAMP.sampler.hamiltonian import Hamiltonian from pyCHAMP.solver.vmc import VMC class Hydrogen(WF): def", "parameters, pos): \"\"\" Compute the value of the wave function.", "== 1: pos = pos.reshape(1, -1) r = np.sqrt(np.sum(pos**2, 1))", "r = np.sqrt(np.sum(pos**2, 1)) return 2*np.exp(-beta*r).reshape(-1, 1) def nuclear_potential(self, pos):", "values(self, parameters, pos): \"\"\" Compute the value of the wave", "wf x: position of the electron Returns: values of psi", "-1) r = np.sqrt(np.sum(pos**2, 1)) return 2*np.exp(-beta*r).reshape(-1, 1) def nuclear_potential(self,", "= - 1. / r return rm1.reshape(-1, 1) def electronic_potential(self,", "Args: parameters : parameters of th wf x: position of", "electron Returns: values of psi \"\"\" beta = parameters[0] if", "x: position of the electron Returns: values of psi \"\"\"", "'max': 5}) sampler = Hamiltonian(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3) optimizer", "from pyCHAMP.solver.vmc import VMC class Hydrogen(WF): def __init__(self, nelec, ndim):", "= Minimize(method='bfgs', maxiter=25, tol=1E-4) # VMS solver vmc = VMC(wf=wf,", "pos): \"\"\" Compute the value of the wave function. Args:", "print('Energy : ', e) print('Variance : ', s) vmc.plot_density(pos) #", "the wave function. 
Args: parameters : parameters of th wf", "Returns: values of psi \"\"\" beta = parameters[0] if pos.ndim", "= np.sqrt(np.sum(pos**2, 1)) return 2*np.exp(-beta*r).reshape(-1, 1) def nuclear_potential(self, pos): r", "return 2*np.exp(-beta*r).reshape(-1, 1) def nuclear_potential(self, pos): r = np.sqrt(np.sum(pos**2, 1))", "e, s = vmc.single_point(opt_param) print('Energy : ', e) print('Variance :", "as np from pyCHAMP.wavefunction.wf_base import WF from pyCHAMP.optimizer.minimize import Minimize", "pyCHAMP.sampler.metropolis import Metropolis from pyCHAMP.sampler.hamiltonian import Hamiltonian from pyCHAMP.solver.vmc import", "vmc.single_point(opt_param) print('Energy : ', e) print('Variance : ', s) vmc.plot_density(pos)", "VMC class Hydrogen(WF): def __init__(self, nelec, ndim): WF.__init__(self, nelec, ndim)", "Metropolis(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3, domain={'min': -5, 'max': 5}) sampler", "optimizer=optimizer) # single point opt_param = [1.] pos, e, s", "nelec, ndim) def values(self, parameters, pos): \"\"\" Compute the value", "th wf x: position of the electron Returns: values of", "from pyCHAMP.sampler.metropolis import Metropolis from pyCHAMP.sampler.hamiltonian import Hamiltonian from pyCHAMP.solver.vmc", "WF.__init__(self, nelec, ndim) def values(self, parameters, pos): \"\"\" Compute the", "pyCHAMP.solver.vmc import VMC class Hydrogen(WF): def __init__(self, nelec, ndim): WF.__init__(self,", "Metropolis from pyCHAMP.sampler.hamiltonian import Hamiltonian from pyCHAMP.solver.vmc import VMC class", "r = np.sqrt(np.sum(pos**2, 1)) rm1 = - 1. 
/ r", "Hamiltonian from pyCHAMP.solver.vmc import VMC class Hydrogen(WF): def __init__(self, nelec,", "__init__(self, nelec, ndim): WF.__init__(self, nelec, ndim) def values(self, parameters, pos):", "def nuclear_potential(self, pos): r = np.sqrt(np.sum(pos**2, 1)) rm1 = -", "sampler = Hamiltonian(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3) optimizer = Minimize(method='bfgs',", "Hydrogen(nelec=1, ndim=3) sampler = Metropolis(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3, domain={'min':", "1) def nuclear_potential(self, pos): r = np.sqrt(np.sum(pos**2, 1)) rm1 =", "return 0 if __name__ == \"__main__\": wf = Hydrogen(nelec=1, ndim=3)", "s = vmc.single_point(opt_param) print('Energy : ', e) print('Variance : ',", "r return rm1.reshape(-1, 1) def electronic_potential(self, pos): return 0 if", "Hamiltonian(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3) optimizer = Minimize(method='bfgs', maxiter=25, tol=1E-4)", "ndim) def values(self, parameters, pos): \"\"\" Compute the value of", "- 1. / r return rm1.reshape(-1, 1) def electronic_potential(self, pos):", "import Metropolis from pyCHAMP.sampler.hamiltonian import Hamiltonian from pyCHAMP.solver.vmc import VMC", "domain={'min': -5, 'max': 5}) sampler = Hamiltonian(nwalkers=1000, nstep=1000, step_size=3, nelec=1,", "# VMS solver vmc = VMC(wf=wf, sampler=sampler, optimizer=optimizer) # single", "np from pyCHAMP.wavefunction.wf_base import WF from pyCHAMP.optimizer.minimize import Minimize from", "pos): r = np.sqrt(np.sum(pos**2, 1)) rm1 = - 1. 
/", "nelec, ndim): WF.__init__(self, nelec, ndim) def values(self, parameters, pos): \"\"\"", "psi \"\"\" beta = parameters[0] if pos.ndim == 1: pos", "sampler = Metropolis(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3, domain={'min': -5, 'max':", "the electron Returns: values of psi \"\"\" beta = parameters[0]", "def electronic_potential(self, pos): return 0 if __name__ == \"__main__\": wf", "beta = parameters[0] if pos.ndim == 1: pos = pos.reshape(1,", "', e) print('Variance : ', s) vmc.plot_density(pos) # optimization init_param", "2*np.exp(-beta*r).reshape(-1, 1) def nuclear_potential(self, pos): r = np.sqrt(np.sum(pos**2, 1)) rm1", "the value of the wave function. Args: parameters : parameters", "parameters : parameters of th wf x: position of the", "-5, 'max': 5}) sampler = Hamiltonian(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3)", "1: pos = pos.reshape(1, -1) r = np.sqrt(np.sum(pos**2, 1)) return", "1)) rm1 = - 1. / r return rm1.reshape(-1, 1)", "nelec=1, ndim=3) optimizer = Minimize(method='bfgs', maxiter=25, tol=1E-4) # VMS solver", "WF from pyCHAMP.optimizer.minimize import Minimize from pyCHAMP.sampler.metropolis import Metropolis from", "parameters[0] if pos.ndim == 1: pos = pos.reshape(1, -1) r", "import Minimize from pyCHAMP.sampler.metropolis import Metropolis from pyCHAMP.sampler.hamiltonian import Hamiltonian", "wf = Hydrogen(nelec=1, ndim=3) sampler = Metropolis(nwalkers=1000, nstep=1000, step_size=3, nelec=1,", "opt_param = [1.] pos, e, s = vmc.single_point(opt_param) print('Energy :", "import autograd.numpy as np from pyCHAMP.wavefunction.wf_base import WF from pyCHAMP.optimizer.minimize", "of the wave function. 
Args: parameters : parameters of th", "5}) sampler = Hamiltonian(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3) optimizer =", "pos = pos.reshape(1, -1) r = np.sqrt(np.sum(pos**2, 1)) return 2*np.exp(-beta*r).reshape(-1,", "if pos.ndim == 1: pos = pos.reshape(1, -1) r =", "import VMC class Hydrogen(WF): def __init__(self, nelec, ndim): WF.__init__(self, nelec,", "ndim=3, domain={'min': -5, 'max': 5}) sampler = Hamiltonian(nwalkers=1000, nstep=1000, step_size=3,", "np.sqrt(np.sum(pos**2, 1)) return 2*np.exp(-beta*r).reshape(-1, 1) def nuclear_potential(self, pos): r =", "= Hamiltonian(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3) optimizer = Minimize(method='bfgs', maxiter=25,", "def values(self, parameters, pos): \"\"\" Compute the value of the", "optimizer = Minimize(method='bfgs', maxiter=25, tol=1E-4) # VMS solver vmc =", "== \"__main__\": wf = Hydrogen(nelec=1, ndim=3) sampler = Metropolis(nwalkers=1000, nstep=1000,", "from pyCHAMP.sampler.hamiltonian import Hamiltonian from pyCHAMP.solver.vmc import VMC class Hydrogen(WF):", "import Hamiltonian from pyCHAMP.solver.vmc import VMC class Hydrogen(WF): def __init__(self,", "Hydrogen(WF): def __init__(self, nelec, ndim): WF.__init__(self, nelec, ndim) def values(self,", "pyCHAMP.wavefunction.wf_base import WF from pyCHAMP.optimizer.minimize import Minimize from pyCHAMP.sampler.metropolis import", "= vmc.single_point(opt_param) print('Energy : ', e) print('Variance : ', s)", "= Hydrogen(nelec=1, ndim=3) sampler = Metropolis(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3,", "of psi \"\"\" beta = parameters[0] if pos.ndim == 1:", "maxiter=25, tol=1E-4) # VMS solver vmc = VMC(wf=wf, sampler=sampler, optimizer=optimizer)", "rm1 = - 1. 
/ r return rm1.reshape(-1, 1) def", "ndim=3) sampler = Metropolis(nwalkers=1000, nstep=1000, step_size=3, nelec=1, ndim=3, domain={'min': -5,", "= VMC(wf=wf, sampler=sampler, optimizer=optimizer) # single point opt_param = [1.]", "of the electron Returns: values of psi \"\"\" beta =", "autograd.numpy as np from pyCHAMP.wavefunction.wf_base import WF from pyCHAMP.optimizer.minimize import", "parameters of th wf x: position of the electron Returns:", "import WF from pyCHAMP.optimizer.minimize import Minimize from pyCHAMP.sampler.metropolis import Metropolis", "Compute the value of the wave function. Args: parameters :", "nuclear_potential(self, pos): r = np.sqrt(np.sum(pos**2, 1)) rm1 = - 1.", "function. Args: parameters : parameters of th wf x: position", "position of the electron Returns: values of psi \"\"\" beta", "ndim=3) optimizer = Minimize(method='bfgs', maxiter=25, tol=1E-4) # VMS solver vmc", "0 if __name__ == \"__main__\": wf = Hydrogen(nelec=1, ndim=3) sampler", "pos): return 0 if __name__ == \"__main__\": wf = Hydrogen(nelec=1,", "= parameters[0] if pos.ndim == 1: pos = pos.reshape(1, -1)", "= np.sqrt(np.sum(pos**2, 1)) rm1 = - 1. / r return", "if __name__ == \"__main__\": wf = Hydrogen(nelec=1, ndim=3) sampler =", "step_size=3, nelec=1, ndim=3) optimizer = Minimize(method='bfgs', maxiter=25, tol=1E-4) # VMS", "from pyCHAMP.wavefunction.wf_base import WF from pyCHAMP.optimizer.minimize import Minimize from pyCHAMP.sampler.metropolis", "', s) vmc.plot_density(pos) # optimization init_param = [0.5] vmc.optimize(init_param) vmc.plot_history()", "Minimize(method='bfgs', maxiter=25, tol=1E-4) # VMS solver vmc = VMC(wf=wf, sampler=sampler," ]
[ "braintree.configuration import Configuration from braintree.resource import Resource class AccountUpdaterDailyReport(Resource): def", "attributes: self.report_url = attributes.pop(\"report_url\") if \"report_date\" in attributes: self.report_date =", "AccountUpdaterDailyReport(Resource): def __init__(self, gateway, attributes): Resource.__init__(self, gateway, attributes) if \"report_url\"", "Resource.__init__(self, gateway, attributes) if \"report_url\" in attributes: self.report_url = attributes.pop(\"report_url\")", "import Resource class AccountUpdaterDailyReport(Resource): def __init__(self, gateway, attributes): Resource.__init__(self, gateway,", "attributes) if \"report_url\" in attributes: self.report_url = attributes.pop(\"report_url\") if \"report_date\"", "in attributes: self.report_url = attributes.pop(\"report_url\") if \"report_date\" in attributes: self.report_date", "braintree.resource import Resource class AccountUpdaterDailyReport(Resource): def __init__(self, gateway, attributes): Resource.__init__(self,", "attributes: self.report_date = attributes.pop(\"report_date\") def __repr__(self): detail_list = [\"report_url\", \"report_date\"]", "\"report_url\" in attributes: self.report_url = attributes.pop(\"report_url\") if \"report_date\" in attributes:", "from braintree.configuration import Configuration from braintree.resource import Resource class AccountUpdaterDailyReport(Resource):", "attributes): Resource.__init__(self, gateway, attributes) if \"report_url\" in attributes: self.report_url =", "gateway, attributes): Resource.__init__(self, gateway, attributes) if \"report_url\" in attributes: self.report_url", "if \"report_date\" in attributes: self.report_date = attributes.pop(\"report_date\") def __repr__(self): detail_list", "= attributes.pop(\"report_date\") def __repr__(self): detail_list = [\"report_url\", \"report_date\"] return super(AccountUpdaterDailyReport,", "if \"report_url\" in attributes: self.report_url = 
attributes.pop(\"report_url\") if \"report_date\" in", "Resource class AccountUpdaterDailyReport(Resource): def __init__(self, gateway, attributes): Resource.__init__(self, gateway, attributes)", "in attributes: self.report_date = attributes.pop(\"report_date\") def __repr__(self): detail_list = [\"report_url\",", "attributes.pop(\"report_url\") if \"report_date\" in attributes: self.report_date = attributes.pop(\"report_date\") def __repr__(self):", "\"report_date\" in attributes: self.report_date = attributes.pop(\"report_date\") def __repr__(self): detail_list =", "__init__(self, gateway, attributes): Resource.__init__(self, gateway, attributes) if \"report_url\" in attributes:", "self.report_date = attributes.pop(\"report_date\") def __repr__(self): detail_list = [\"report_url\", \"report_date\"] return", "gateway, attributes) if \"report_url\" in attributes: self.report_url = attributes.pop(\"report_url\") if", "from braintree.resource import Resource class AccountUpdaterDailyReport(Resource): def __init__(self, gateway, attributes):", "class AccountUpdaterDailyReport(Resource): def __init__(self, gateway, attributes): Resource.__init__(self, gateway, attributes) if", "attributes.pop(\"report_date\") def __repr__(self): detail_list = [\"report_url\", \"report_date\"] return super(AccountUpdaterDailyReport, self).__repr__(detail_list)", "= attributes.pop(\"report_url\") if \"report_date\" in attributes: self.report_date = attributes.pop(\"report_date\") def", "def __init__(self, gateway, attributes): Resource.__init__(self, gateway, attributes) if \"report_url\" in", "self.report_url = attributes.pop(\"report_url\") if \"report_date\" in attributes: self.report_date = attributes.pop(\"report_date\")", "Configuration from braintree.resource import Resource class AccountUpdaterDailyReport(Resource): def __init__(self, gateway,", "import Configuration from braintree.resource import Resource class AccountUpdaterDailyReport(Resource): def __init__(self," ]
[ "= Constants.BALL_TEAMS[self.color] self.rad = int(Constants.BALL_SIZE/2) self.image = pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE], pygame.SRCALPHA)", "def __init__(self, all_sprites, all_balls): self.all_sprites = all_sprites self.all_balls = all_balls", "self.x_pos = pos[0] self.y_pos = pos[1] self.rect = self.image.get_rect(center=(self.x_pos, self.y_pos))", "ball = Ball(pos, vel, team) self.all_sprites.add(ball) self.all_balls.add(ball) def ball_test(self): print(\"This", "self.rad), self.rad) self.x_pos = pos[0] self.y_pos = pos[1] self.rect =", "vel[0] self.dy = vel[1] def update(self): self.check_boundary() self.x_pos += self.dx", "pygame.gfxdraw from constants import Constants class Balls(object): def __init__(self, all_sprites,", "self.rad = int(Constants.BALL_SIZE/2) self.image = pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE], pygame.SRCALPHA) pygame.draw.circle(self.image, self.file,", "self.rect.center = pygame.mouse.get_pos() # has sprite follow the mouse def", "vel[1] def update(self): self.check_boundary() self.x_pos += self.dx self.y_pos += self.dy", "out how to spawn multiple balls with some sort of", "def __init__(self, pos, vel, team): super().__init__() self.color = team self.file", "= pos[1] self.rect = self.image.get_rect(center=(self.x_pos, self.y_pos)) self.dx = vel[0] self.dy", "not Constants.PLAYER_WIDTH <= self.x_pos <= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH): self.dx = -1*self.dx if", "<= self.x_pos <= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH): self.dx = -1*self.dx if not 0", "Ball(pygame.sprite.Sprite): def __init__(self, pos, vel, team): super().__init__() self.color = team", "some sort of delay ball = Ball(pos, vel, team) self.all_sprites.add(ball)", "if not 0 <= self.y_pos <= Constants.SCREEN_HEIGHT: self.dy = -1*self.dy", "vel, team) self.all_sprites.add(ball) self.all_balls.add(ball) def ball_test(self): print(\"This is a Ball", "super().__init__() self.color = team self.file = 
Constants.BALL_TEAMS[self.color] self.rad = int(Constants.BALL_SIZE/2)", "# has sprite follow the mouse def check_boundary(self): if not", "Constants class Balls(object): def __init__(self, all_sprites, all_balls): self.all_sprites = all_sprites", "self.check_boundary() self.x_pos += self.dx self.y_pos += self.dy self.rect.center = [self.x_pos,", "self.dx self.y_pos += self.dy self.rect.center = [self.x_pos, self.y_pos] # self.rect.center", "follow the mouse def check_boundary(self): if not Constants.PLAYER_WIDTH <= self.x_pos", "import pygame import pygame.gfxdraw from constants import Constants class Balls(object):", "print(type(self)) class Ball(pygame.sprite.Sprite): def __init__(self, pos, vel, team): super().__init__() self.color", "self.dy = vel[1] def update(self): self.check_boundary() self.x_pos += self.dx self.y_pos", "self.all_balls = all_balls def spawn_ball(self, pos, vel, team): # Todo:", "Figure out how to spawn multiple balls with some sort", "team) self.all_sprites.add(ball) self.all_balls.add(ball) def ball_test(self): print(\"This is a Ball Test!\")", "# Todo: Figure out how to spawn multiple balls with", "= self.image.get_rect(center=(self.x_pos, self.y_pos)) self.dx = vel[0] self.dy = vel[1] def", "sprite follow the mouse def check_boundary(self): if not Constants.PLAYER_WIDTH <=", "= pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE], pygame.SRCALPHA) pygame.draw.circle(self.image, self.file, (self.rad, self.rad), self.rad) self.x_pos", "print(self.__dict__) print(type(self)) class Ball(pygame.sprite.Sprite): def __init__(self, pos, vel, team): super().__init__()", "team self.file = Constants.BALL_TEAMS[self.color] self.rad = int(Constants.BALL_SIZE/2) self.image = pygame.Surface([Constants.BALL_SIZE,", "class Balls(object): def __init__(self, all_sprites, all_balls): self.all_sprites = all_sprites self.all_balls", "vel, team): super().__init__() self.color = team self.file = Constants.BALL_TEAMS[self.color] self.rad", "balls with some 
sort of delay ball = Ball(pos, vel,", "of delay ball = Ball(pos, vel, team) self.all_sprites.add(ball) self.all_balls.add(ball) def", "check_boundary(self): if not Constants.PLAYER_WIDTH <= self.x_pos <= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH): self.dx =", "has sprite follow the mouse def check_boundary(self): if not Constants.PLAYER_WIDTH", "all_sprites self.all_balls = all_balls def spawn_ball(self, pos, vel, team): #", "multiple balls with some sort of delay ball = Ball(pos,", "Ball Test!\") print(self) def update(self): print(self.__dict__) print(type(self)) class Ball(pygame.sprite.Sprite): def", "= team self.file = Constants.BALL_TEAMS[self.color] self.rad = int(Constants.BALL_SIZE/2) self.image =", "self.rect = self.image.get_rect(center=(self.x_pos, self.y_pos)) self.dx = vel[0] self.dy = vel[1]", "= pos[0] self.y_pos = pos[1] self.rect = self.image.get_rect(center=(self.x_pos, self.y_pos)) self.dx", "Constants.BALL_SIZE], pygame.SRCALPHA) pygame.draw.circle(self.image, self.file, (self.rad, self.rad), self.rad) self.x_pos = pos[0]", "pygame.SRCALPHA) pygame.draw.circle(self.image, self.file, (self.rad, self.rad), self.rad) self.x_pos = pos[0] self.y_pos", "update(self): print(self.__dict__) print(type(self)) class Ball(pygame.sprite.Sprite): def __init__(self, pos, vel, team):", "with some sort of delay ball = Ball(pos, vel, team)", "-1*self.dx if not 0 <= self.y_pos <= Constants.SCREEN_HEIGHT: self.dy =", "update(self): self.check_boundary() self.x_pos += self.dx self.y_pos += self.dy self.rect.center =", "all_balls): self.all_sprites = all_sprites self.all_balls = all_balls def spawn_ball(self, pos,", "Test!\") print(self) def update(self): print(self.__dict__) print(type(self)) class Ball(pygame.sprite.Sprite): def __init__(self,", "= pygame.mouse.get_pos() # has sprite follow the mouse def check_boundary(self):", "self.dx = -1*self.dx if not 0 <= self.y_pos <= Constants.SCREEN_HEIGHT:", "self.dy self.rect.center = [self.x_pos, self.y_pos] # 
self.rect.center = pygame.mouse.get_pos() #", "delay ball = Ball(pos, vel, team) self.all_sprites.add(ball) self.all_balls.add(ball) def ball_test(self):", "self.y_pos)) self.dx = vel[0] self.dy = vel[1] def update(self): self.check_boundary()", "self.x_pos += self.dx self.y_pos += self.dy self.rect.center = [self.x_pos, self.y_pos]", "self.all_sprites = all_sprites self.all_balls = all_balls def spawn_ball(self, pos, vel,", "= all_sprites self.all_balls = all_balls def spawn_ball(self, pos, vel, team):", "= vel[0] self.dy = vel[1] def update(self): self.check_boundary() self.x_pos +=", "= int(Constants.BALL_SIZE/2) self.image = pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE], pygame.SRCALPHA) pygame.draw.circle(self.image, self.file, (self.rad,", "= [self.x_pos, self.y_pos] # self.rect.center = pygame.mouse.get_pos() # has sprite", "def update(self): self.check_boundary() self.x_pos += self.dx self.y_pos += self.dy self.rect.center", "Ball(pos, vel, team) self.all_sprites.add(ball) self.all_balls.add(ball) def ball_test(self): print(\"This is a", "import Constants class Balls(object): def __init__(self, all_sprites, all_balls): self.all_sprites =", "+= self.dy self.rect.center = [self.x_pos, self.y_pos] # self.rect.center = pygame.mouse.get_pos()", "the mouse def check_boundary(self): if not Constants.PLAYER_WIDTH <= self.x_pos <=", "[self.x_pos, self.y_pos] # self.rect.center = pygame.mouse.get_pos() # has sprite follow", "Constants.BALL_TEAMS[self.color] self.rad = int(Constants.BALL_SIZE/2) self.image = pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE], pygame.SRCALPHA) pygame.draw.circle(self.image,", "vel, team): # Todo: Figure out how to spawn multiple", "pos, vel, team): # Todo: Figure out how to spawn", "ball_test(self): print(\"This is a Ball Test!\") print(self) def update(self): print(self.__dict__)", "spawn multiple balls with some sort of delay ball =", "print(\"This is a Ball Test!\") print(self) def update(self): 
print(self.__dict__) print(type(self))", "(Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH): self.dx = -1*self.dx if not 0 <= self.y_pos <=", "self.y_pos += self.dy self.rect.center = [self.x_pos, self.y_pos] # self.rect.center =", "= Ball(pos, vel, team) self.all_sprites.add(ball) self.all_balls.add(ball) def ball_test(self): print(\"This is", "from constants import Constants class Balls(object): def __init__(self, all_sprites, all_balls):", "team): # Todo: Figure out how to spawn multiple balls", "Todo: Figure out how to spawn multiple balls with some", "self.x_pos <= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH): self.dx = -1*self.dx if not 0 <=", "= all_balls def spawn_ball(self, pos, vel, team): # Todo: Figure", "__init__(self, all_sprites, all_balls): self.all_sprites = all_sprites self.all_balls = all_balls def", "self.color = team self.file = Constants.BALL_TEAMS[self.color] self.rad = int(Constants.BALL_SIZE/2) self.image", "self.rect.center = [self.x_pos, self.y_pos] # self.rect.center = pygame.mouse.get_pos() # has", "def spawn_ball(self, pos, vel, team): # Todo: Figure out how", "self.all_balls.add(ball) def ball_test(self): print(\"This is a Ball Test!\") print(self) def", "self.image.get_rect(center=(self.x_pos, self.y_pos)) self.dx = vel[0] self.dy = vel[1] def update(self):", "import pygame.gfxdraw from constants import Constants class Balls(object): def __init__(self,", "pos[0] self.y_pos = pos[1] self.rect = self.image.get_rect(center=(self.x_pos, self.y_pos)) self.dx =", "self.image = pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE], pygame.SRCALPHA) pygame.draw.circle(self.image, self.file, (self.rad, self.rad), self.rad)", "self.y_pos] # self.rect.center = pygame.mouse.get_pos() # has sprite follow the", "a Ball Test!\") print(self) def update(self): print(self.__dict__) print(type(self)) class Ball(pygame.sprite.Sprite):", "pos[1] self.rect = self.image.get_rect(center=(self.x_pos, self.y_pos)) self.dx = vel[0] self.dy =", "# 
self.rect.center = pygame.mouse.get_pos() # has sprite follow the mouse", "pygame.mouse.get_pos() # has sprite follow the mouse def check_boundary(self): if", "pos, vel, team): super().__init__() self.color = team self.file = Constants.BALL_TEAMS[self.color]", "int(Constants.BALL_SIZE/2) self.image = pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE], pygame.SRCALPHA) pygame.draw.circle(self.image, self.file, (self.rad, self.rad),", "to spawn multiple balls with some sort of delay ball", "constants import Constants class Balls(object): def __init__(self, all_sprites, all_balls): self.all_sprites", "self.file, (self.rad, self.rad), self.rad) self.x_pos = pos[0] self.y_pos = pos[1]", "def check_boundary(self): if not Constants.PLAYER_WIDTH <= self.x_pos <= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH): self.dx", "self.dx = vel[0] self.dy = vel[1] def update(self): self.check_boundary() self.x_pos", "self.y_pos = pos[1] self.rect = self.image.get_rect(center=(self.x_pos, self.y_pos)) self.dx = vel[0]", "how to spawn multiple balls with some sort of delay", "self.all_sprites.add(ball) self.all_balls.add(ball) def ball_test(self): print(\"This is a Ball Test!\") print(self)", "class Ball(pygame.sprite.Sprite): def __init__(self, pos, vel, team): super().__init__() self.color =", "if not Constants.PLAYER_WIDTH <= self.x_pos <= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH): self.dx = -1*self.dx", "sort of delay ball = Ball(pos, vel, team) self.all_sprites.add(ball) self.all_balls.add(ball)", "def update(self): print(self.__dict__) print(type(self)) class Ball(pygame.sprite.Sprite): def __init__(self, pos, vel,", "spawn_ball(self, pos, vel, team): # Todo: Figure out how to", "self.rad) self.x_pos = pos[0] self.y_pos = pos[1] self.rect = self.image.get_rect(center=(self.x_pos,", "pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE], pygame.SRCALPHA) pygame.draw.circle(self.image, self.file, (self.rad, self.rad), self.rad) self.x_pos =", 
"pygame.draw.circle(self.image, self.file, (self.rad, self.rad), self.rad) self.x_pos = pos[0] self.y_pos =", "all_sprites, all_balls): self.all_sprites = all_sprites self.all_balls = all_balls def spawn_ball(self,", "is a Ball Test!\") print(self) def update(self): print(self.__dict__) print(type(self)) class", "def ball_test(self): print(\"This is a Ball Test!\") print(self) def update(self):", "= vel[1] def update(self): self.check_boundary() self.x_pos += self.dx self.y_pos +=", "print(self) def update(self): print(self.__dict__) print(type(self)) class Ball(pygame.sprite.Sprite): def __init__(self, pos,", "mouse def check_boundary(self): if not Constants.PLAYER_WIDTH <= self.x_pos <= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH):", "pygame import pygame.gfxdraw from constants import Constants class Balls(object): def", "= -1*self.dx if not 0 <= self.y_pos <= Constants.SCREEN_HEIGHT: self.dy", "<= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH): self.dx = -1*self.dx if not 0 <= self.y_pos", "self.file = Constants.BALL_TEAMS[self.color] self.rad = int(Constants.BALL_SIZE/2) self.image = pygame.Surface([Constants.BALL_SIZE, Constants.BALL_SIZE],", "Balls(object): def __init__(self, all_sprites, all_balls): self.all_sprites = all_sprites self.all_balls =", "(self.rad, self.rad), self.rad) self.x_pos = pos[0] self.y_pos = pos[1] self.rect", "team): super().__init__() self.color = team self.file = Constants.BALL_TEAMS[self.color] self.rad =", "Constants.PLAYER_WIDTH <= self.x_pos <= (Constants.PLAYER_WIDTH+Constants.BOARD_WIDTH): self.dx = -1*self.dx if not", "all_balls def spawn_ball(self, pos, vel, team): # Todo: Figure out", "__init__(self, pos, vel, team): super().__init__() self.color = team self.file =", "+= self.dx self.y_pos += self.dy self.rect.center = [self.x_pos, self.y_pos] #" ]
[ "#============================================================================== # Copy ROI on button click. #============================================================================== def cpROI(self):", "self.eggRotBBox[self.intDivVal,self.withinSeqVal] = self.originalEggRotBBox self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = self.originalEggBoxPoints else: self.divVal = self.diag.imv.currentIndex", "window x (topRight). # self.btnLayout.addWidget(self.exit_btn,2,1) # Add button layout to", "and ROI. self.getSeqValsAndCurrROI() # 0 or 90 degree angles seem", "self.tSeqd = np.zeros_like(ims) for l in range(len(self.tSeqd)): self.tSeqd[l] = ims[l].T", "# Close button - not implemented (hidden) #============================================================================== #============================================================================== #", "bottomMost = ySorted[:2, :] topMost = ySorted[2:, :] # Get", "app.quit() # else: # event.ignore() # #============================================================================== #============================================================================== # #self.originalEggRotBBox", "# Random angle ROIs # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.originalEggRotBBox[2],", "Highlight first row self.diag.table.selectRow(0) # Make layout checkLayout = QGridLayout()", "#============================================================================== # Get image #============================================================================== def imImport(self): for f in", "Remove ROI self.diag.imv.removeItem(self.roi) # Store nans in place of ROI", "buttons self.cpROI_btn = QtGui.QPushButton('&Copy ROI') self.cpROI_btn.setMinimumHeight(40); self.useCpROI_btn = QtGui.QPushButton('&Use Copied", "pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # else: # # Random angle", "-1 ySorted = 
self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get bottom most,", "self.roi.addScaleHandle([0, 0], [1, 1]) # self.roi.setPen('y',width=3) # self.roi.removable # self.roi.invertible", "button clicked. #============================================================================== def updateTable(self): self.tableData['ROI approved'][self.diag.table.currentRow()] = 'Approved' self.tableCols[self.diag.table.currentRow()]", "# Generate data for populating the embryo/approveROI table. #============================================================================== def", "getSeqValsAndCurrROI(self): # Calculate the indices for current frame if self.eggInt", "self.diag.imv.setImage(self.compSeq) self.importOpenCVROIs(eggRotBBox, eggBoxPoints) self.getSeqValsAndCurrROI() self.updateOpenCVEggROINewEmbryo() # Add the ROI to", "-90: #self.currROI_eggRotBBox[4] = -89 # Get rotated bounding box points", "ROI to ImageItem self.diag.show() # Call function to add data", "distance as dist import glob import re import os from", "from PyQt5.QtGui import * import sys import cv2 import pandas", "the previously updated or taking the unaltered ROI from OpenCV", "angles, require different of the X size # Rectangular ROI", "0],[0.5,0.5]) # self.roi.addRotateHandle([0, 1], [0.5,0.5]) # self.roi.addScaleHandle([1, 1], [0, 0])", "self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.currROI_eggRotBBox = self.eggRotBBox[0,self.intDivVal] self.currROI_eggBoxPoints = self.eggBoxPoints[0,self.intDivVal]", "will be our bottom-right point D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0]", "# Exit - prompt user to confirm #self.exit_btn.clicked.connect(self.closeEvent) # Connect", "in hh: self.xyPosHandles.append([h.x(),h.y()]) (eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng = cv2.minAreaRect(np.array(self.xyPosHandles,", "not implemented (hidden) #============================================================================== 
#============================================================================== # def closeEvent(self, event): #", "with the largest distance will be our bottom-right point D", "update=True) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # roi", ": take eggID defined ROIs and visualise ''' sliderUpdate =", "Rectangular ROI used to enable more easy handling of corner", "= pg.ImageView() self.btn_save = QPushButton('Save', self) #============================================================================== # #============================================================================== def", "| (self.originalEggRotBBox[4] == -0.0)| (self.originalEggRotBBox[4] == 0.0): # self.roi =", "89 as a bodge fix. if self.currROI_eggRotBBox[4] == -90: #self.currROI_eggRotBBox[4]", "layout to GridLayout. checkLayout.addLayout(self.btnLayout,0,5) # Format images for pyqtgraph and", "#============================================================================== #============================================================================== # def updateImage(self): # self.getSeqValsAndCurrROI() # #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] =", "'Approved' self.tableCols[self.diag.table.currentRow()] = QtGui.QColor(0,100,0,120) horHeaders = [] for n, key", "chagnges. if (self.currROI_eggRotBBox[4] == -90.0) | (self.currROI_eggRotBBox[4] == -0.0)| (self.currROI_eggRotBBox[4]", "1]), :] # (bl, br) = bottomMost # # Use", "1]), :] # Get bottom most, and top most sorted", "super(eggUI, self).__init__(parent) # Make QDialog self.diag = QtGui.QDialog() global parentPath,", "0 or 90 degree angles seem very buggy. Shift to", "OpenCV. 
#============================================================================== def createOpenCVEggROI(self): # Get relevant sequence position and", "= self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]), :] # # Get bottom most, and", "# Make var for dealing with modifications to roi self.updatedEggROI=[]", ") if eggBBAng == -90: eggBBAng = -89 elif eggBBAng", "np.nan, np.nan] #============================================================================== # Copy ROI on button click. #==============================================================================", "#============================================================================== def updateUI(self,ims,eggRotBBox, eggBoxPoints): self.imImport() self.diag.imv.setImage(self.compSeq) self.importOpenCVROIs(eggRotBBox, eggBoxPoints) self.getSeqValsAndCurrROI() self.updateOpenCVEggROINewEmbryo()", "self.currROI_eggBoxPoints = self.eggBoxPoints[0,self.intDivVal] #============================================================================== # Generate a pyqtgraph ROI, using", "ROIs and visualise ''' sliderUpdate = QtCore.pyqtSignal() embryoUpdate = QtCore.pyqtSignal()", "sequence ROIs if self.eggInt != 1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal", "topMost[np.argsort(D)[::-1], :] # # Make ROI - note non 0,or", "for m, item in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) self.diag.table.setItem(m, n,", "video position and ROI self.getSeqValsAndCurrROI() # 0 or 90 degree", "ROI') self.cpROI_btn.setMinimumHeight(40); self.useCpROI_btn = QtGui.QPushButton('&Use Copied ROI') self.useCpROI_btn.setMinimumHeight(40); self.noEgg_btn =", "scale, angle and position roiChanges = self.roi.getGlobalTransform() changeX = -roiChanges.getTranslation()[0]", "PyQt5.QtGui import * import sys import cv2 import pandas as", "self.eggRotBBox = eggRotBBox self.eggBoxPoints = eggBoxPoints self.originalEggRotBBox = eggRotBBox.copy() 
self.originalEggBoxPoints", "== -0.0)| (self.currROI_eggRotBBox[4] == 0.0): self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2],", "# Update image iteratively when slider moved #============================================================================== #============================================================================== #", "# Connect changes in timeline so correct ROI is created", "1], [0.5,0.5]) # self.roi.addScaleHandle([1, 1], [0, 0]) # self.roi.addScaleHandle([0, 0],", "different alignment data just on the first column self.diag.table.setRowCount(int(len(self.embryoLabels))) self.diag.table.setColumnCount(2)", "-1 # Save updated # If more than one frame", "# topMost = ySorted[2:, :] # # Get bottom most", "from PyQt5.QtCore import * from PyQt5.QtGui import * import sys", "Make ROI - note non 0,or 90 degree angles, require", "= (im/ran) # out = out-out.min() # self.compSeq[self.diag.imv.currentIndex] = out.astype(np.uint8)", "(((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle) hh = self.roi.getHandles() hh = [self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos()) for h", "np.zeros_like(ims) for l in range(len(self.tSeqd)): self.tSeqd[l] = ims[l].T #============================================================================== #", "Store nans in place of ROI if self.eggInt != 1234:", "for l in range(len(self.tSeqd)): self.tSeqd[l] = ims[l].T #============================================================================== # Get", "for tracking user chagnges. 
if (self.currROI_eggRotBBox[4] == -90.0) | (self.currROI_eggRotBBox[4]", "# Store copied ROI to embryo sequence ROIs if self.eggInt", "our bottom-right point # D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] #", "pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) else: # Get rotated bounding box", "# Rectangular ROI used to enable more easy handling of", "self.getSeqValsAndCurrROI() # # Get rotated bounding box points # ySorted", "button layout to GridLayout. checkLayout.addLayout(self.btnLayout,0,5) # Format images for pyqtgraph", "int(self.divVal) self.currROI_eggRotBBox = self.eggRotBBox[0,self.intDivVal] self.currROI_eggBoxPoints = self.eggBoxPoints[0,self.intDivVal] #============================================================================== # Generate", "fix. if self.currROI_eggRotBBox[4] == -90: #self.currROI_eggRotBBox[4] = -89 # Get", "used to enable more easy handling of corner handles for", "else: self.eggRotBBox[0,self.intDivVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[0,self.intDivVal] =", "exit the program?\" # reply = QtGui.QMessageBox.question(self, 'Message', # quit_msg,", "displayed. 
self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo) #self.diag.keyPressEvent(self.keyPressEvent) #============================================================================== # Generate data for populating the", "(eggBBW, eggBBH), eggBBAng)) # Otherwise just save simply else: self.eggRotBBox[0,self.intDivVal]", "(hidden) #============================================================================== #============================================================================== # def closeEvent(self, event): # # quit_msg", "0], [1, 1]) # self.roi.setPen('y',width=3) # self.roi.removable # self.roi.invertible =", "as pg #from PyQt4.Qt import * #%% class eggUI(QDialog): '''", "m, item in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) self.diag.table.setItem(m, n, newitem)", "for pyqtgraph ROIs self.tSeqd = np.zeros_like(ims) for l in range(len(self.tSeqd)):", "size of Table self.diag.table.resizeRowsToContents() #============================================================================== # Update the user interface", "[eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[0,self.intDivVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW,", "# Generate a pyqtgraph ROI, using data from OpenCV. 
#==============================================================================", "#============================================================================== def updateOpenCVEggROICurrEmbryo(self): # Remove previous if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi)", "= int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal]", "eggBBH, eggBBAng] self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) #", "cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH) # ran = (im.max()-im.min())/255. # out = (im/ran) #", "out.astype(np.uint8) # self.diag.imv.setImage(self.compSeq.T) # self.diag.imv.show() # #======== #============================================================================== #============================================================================== #", "= embryoLabels self.diag.setWindowTitle('Identify eggs') # Make ImageView self.diag.imv = pg.ImageView()", "= pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # Add handles", "import * import sys import cv2 import pandas as pd", "the Euclidean distance between the # The point with the", "distance between the # The point with the largest distance", "pyqtgraph ROI, using data from OpenCV. 
#============================================================================== def createOpenCVEggROI(self): #", "nan to current ROI if 'No Egg' button clicked #==============================================================================", "self.diag.table.selectRow(0) # Make layout checkLayout = QGridLayout() # Deal with", "roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # else: # #", "topMost[np.argsort(D)[::-1], :] self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) else: #", "### Still to do... # self.diag.imv.addItem(self.roi) # self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== #===============", "eggBBAng] self.eggBoxPoints[0,self.intDivVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) #============================================================================== #", "# Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) # Adjust size of Table self.diag.table.resizeRowsToContents()", "== QtGui.QMessageBox.Yes: # #event.accept() # app.quit() # else: # event.ignore()", "# Update the ROI for current embryo. #============================================================================== def updateOpenCVEggROICurrEmbryo(self):", "a pyqtgraph ROI, using data from OpenCV. 
#============================================================================== def createOpenCVEggROI(self):", "# Update ROI, either updating the previously updated or taking", "self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get bottom most, and top most", "event.ignore() # #============================================================================== #============================================================================== # #self.originalEggRotBBox = eggRotBBox.copy() # #self.originalEggBoxPoints", "out = out-out.min() # self.compSeq[self.diag.imv.currentIndex] = out.astype(np.uint8) # self.diag.imv.setImage(self.compSeq.T) #", "first column self.diag.table.setRowCount(int(len(self.embryoLabels))) self.diag.table.setColumnCount(2) # Highlight first row self.diag.table.selectRow(0) #", "pg.ROI([bottomMost[0][0], bottomMost[0][1]], [self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]],", "image #============================================================================== def imImport(self): for f in range(len(self.eggUIimPaths)): im =", "and top most sorted corner points bottomMost = ySorted[:2, :]", "= [QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels)) # Enter data onto Table horHeaders =", "self.cpROI_btn.setMinimumHeight(40); self.useCpROI_btn = QtGui.QPushButton('&Use Copied ROI') self.useCpROI_btn.setMinimumHeight(40); self.noEgg_btn = QtGui.QPushButton('&No", "# Update table when approve ROI button clicked. 
#============================================================================== def", "import QtGui from PyQt5.QtCore import * from PyQt5.QtGui import *", "eggBBAng == -0: eggBBAng = -1 # Save updated #", "import pandas as pd from PyQt5.Qt import * import pyqtgraph", "# Adjust size of Table self.diag.table.resizeRowsToContents() # self.diag.table.resizeColumnsToContents() #============================================================================== #", "changeAngle, roiChanges,updatedEggROI, changeX, changeY, changeScaleX, changeScaleY, changeAngle # Get changes", "to embryo sequence ROIs if self.eggInt != 1234: self.divVal =", "defined ROIs and visualise ''' sliderUpdate = QtCore.pyqtSignal() embryoUpdate =", "# Make layout checkLayout = QGridLayout() # Deal with stretching", "= pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Debug # print 'no", "= topMost[np.argsort(D)[::-1], :] self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif", "self.originalEggRotBBox = self.currROI_eggRotBBox self.originalEggBoxPoints = self.currROI_eggBoxPoints #============================================================================== # Assign nan", "bottomMost[0][1]], [-self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # self.roi.setAngle(self.originalEggRotBBox[4], update=True) # # roi =", "[0.5,0.5]) self.roi.addScaleHandle([1, 1], [0, 0]) self.roi.addScaleHandle([0, 0], [1, 1]) self.roi.setPen('y',width=3)", "# #self.originalEggBoxPoints = eggBoxPoints.copy() # #self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] # #self.currROI_eggBoxPoints", "self.diag.table.resizeRowsToContents() # self.diag.table.resizeColumnsToContents() #============================================================================== # Update table when approve ROI", "self.roi.sigRegionChangeFinished.connect(self.updateROI) 
#============================================================================== # Update ROI for new embryo. #============================================================================== def", "ROI for new embryo. #============================================================================== def updateOpenCVEggROINewEmbryo(self): # Remove old", "= self.seq # #self.UI.comp(self.imImport(self.diag.imv.currentIndex())) # im = cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH) # ran", "import * import pyqtgraph as pg #from PyQt4.Qt import *", "button clicked #============================================================================== def recordNoEgg(self): # Remove ROI self.diag.imv.removeItem(self.roi) #", "easy handling of corner handles for tracking user chagnges. #", "so correct ROI is created and displayed. self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo) #self.diag.keyPressEvent(self.keyPressEvent) #==============================================================================", "# def updateImage(self): # self.getSeqValsAndCurrROI() # #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] = self.seq #", "# Copy ROI on button click. 
#============================================================================== def cpROI(self): self.originalEggRotBBox", "parentPath, embryo): self.parentPath = parentPath self.embryo = embryo self.embryoFolders =", "particular embryo #============================================================================== def getEmbryoFolders(self, parentPath, embryo): self.parentPath = parentPath", "the program?\" # reply = QtGui.QMessageBox.question(self, 'Message', # quit_msg, QtGui.QMessageBox.Yes,", "# self.roi.invertible = 'True' # # Make var for dealing", "= self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal = int(self.divVal) self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal]))", "pyqtgraph.Qt import QtCore, QtGui import numpy as np from scipy.spatial", "#============================================================================== def imImport(self): for f in range(len(self.eggUIimPaths)): im = cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH)", "Still to do... self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI. #==============================================================================", "Get relevant video position and ROI. 
# self.getSeqValsAndCurrROI() # #", "Store copied ROI to embryo sequence ROIs if self.eggInt !=", "- self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] else: self.divVal", "of Table self.diag.table.resizeRowsToContents() # self.diag.table.resizeColumnsToContents() #============================================================================== # Update table when", "self.diag.setWindowTitle('Identify eggs') self.diag.imv = pg.ImageView() self.btn_save = QPushButton('Save', self) #==============================================================================", "QtCore.pyqtSignal() embryoUpdate = QtCore.pyqtSignal() keyPressed = QtCore.pyqtSignal() def __init__(self, parent=None):", "dist import glob import re import os from PyQt5 import", "QtGui.QMessageBox.No) # # if reply == QtGui.QMessageBox.Yes: # #event.accept() #", "self.eggBoxPoints[self.intDivVal,self.withinSeqVal] # # # Modified version of updateOpenCVEggROICurrEmbryo # #", "in ImageView # self.formatSequence(ims) self.imImport() self.diag.imv.setImage(self.compSeq) # Add the ROI", "def updateTable(self): self.tableData['ROI approved'][self.diag.table.currentRow()] = 'Approved' self.tableCols[self.diag.table.currentRow()] = QtGui.QColor(0,100,0,120) horHeaders", "self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng]", "each corner. 
Get handle positions self.xyPosHandles =[] for h in", "self.btn_save = QPushButton('Save', self) #============================================================================== # #============================================================================== def showUI(self,ims,eggRotBBox, eggBoxPoints,", "#============================================================================== def getEmbryoFolders(self, parentPath, embryo): self.parentPath = parentPath self.embryo =", "if reply == QtGui.QMessageBox.Yes: # #event.accept() # app.quit() # else:", "= eggRotBBox.copy() # #self.originalEggBoxPoints = eggBoxPoints.copy() # #self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal]", "Header self.diag.table.setHorizontalHeaderLabels(horHeaders) #Adjust size of Table self.diag.table.resizeRowsToContents() #============================================================================== # Update", "and visualise ''' sliderUpdate = QtCore.pyqtSignal() embryoUpdate = QtCore.pyqtSignal() keyPressed", "self.btnLayout.addWidget(self.useCpROI_btn,0,1) self.btnLayout.addWidget(self.noEgg_btn,1,1) self.btnLayout.addWidget(self.approveROI_btn,1,0) # Exit button not implemented, just use", "= self.eggBoxPoints[self.intDivVal,self.withinSeqVal] else: self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.currROI_eggRotBBox", "self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] self.currROI_eggBoxPoints =", "(im/ran) out = out-out.min() self.compSeq[int(f)] = out.astype(np.uint8) self.compSeq[f] = self.compSeq[f].T", "90 degree angles, require different of the X size #", "self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [np.nan, np.nan, np.nan, np.nan, np.nan] self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan]", "Apply copied ROI self.useCpROI_btn.clicked.connect(self.applyCopiedROI) # Assign nan to 
frames not", "the first column self.diag.table.setRowCount(int(len(self.embryoLabels))) self.diag.table.setColumnCount(2) # Highlight first row self.diag.table.selectRow(0)", "pyqtgraph as pg #from PyQt4.Qt import * #%% class eggUI(QDialog):", "np.nan] self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] else: self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] self.eggRotBBox[0,self.intDivVal] =", "= self.originalEggBoxPoints else: self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.eggRotBBox[0,self.intDivVal]", "= self.eggBoxPoints[self.intDivVal,self.withinSeqVal] # # # Modified version of updateOpenCVEggROICurrEmbryo #", "(eggBBW, eggBBH), eggBBAng = cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32) ) if eggBBAng ==", "= topMost[np.argsort(D)[::-1], :] # # Make ROI - note non", "newitem) newitem.setBackground(self.tableCols[m]) #Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) #Adjust size of Table self.diag.table.resizeRowsToContents()", "ROI is created and displayed. 
self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo) #self.diag.keyPressEvent(self.keyPressEvent) #============================================================================== # Generate", "(self.currROI_eggRotBBox[4] == -90.0) | (self.currROI_eggRotBBox[4] == -0.0)| (self.currROI_eggRotBBox[4] == 0.0):", "#self.exit_btn.clicked.connect(self.closeEvent) # Connect changes in timeline so correct ROI is", "else: self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.currROI_eggRotBBox = self.eggRotBBox[0,self.intDivVal]", "= topMost[np.argsort(D)[::-1], :] # Make ROI - note non 0,or", "self.currROI_eggRotBBox[4] == -90: #self.currROI_eggRotBBox[4] = -89 # Get rotated bounding", "# Random angle ROIs self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])", "self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.eggRotBBox[self.intDivVal,self.withinSeqVal] = self.originalEggRotBBox self.eggBoxPoints[self.intDivVal,self.withinSeqVal] =", "box points # ySorted = self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]), :] # #", "1]) # self.roi.setPen('y',width=3) # self.roi.removable # self.roi.invertible = 'True' #", "Get image #============================================================================== def imImport(self): for f in range(len(self.eggUIimPaths)): im", "#self.UI.comp(self.imImport(self.diag.imv.currentIndex())) # im = cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH) # ran = (im.max()-im.min())/255. 
#", "self.originalEggRotBBox self.eggBoxPoints[0,self.intDivVal] = self.originalEggBoxPoints self.updateOpenCVEggROICurrEmbryo() #============================================================================== # #============================================================================== #============================================================================== #", "hh: self.xyPosHandles.append([h.x(),h.y()]) (eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng = cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32)", "Make layout checkLayout = QGridLayout() # Deal with stretching for", "var for dealing with modifications to roi # self.updatedEggROI=[] #", "for current embryo. #============================================================================== def updateOpenCVEggROICurrEmbryo(self): # Remove previous if", "self.updatedEggROI=[] self.roi.sigRegionChangeFinished.connect(self.updateROI) #else: #============================================================================== # Update the ROI for current", "eggRotBBox[vidTime][3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Add", "\"*/\" + embryo +\"/\") self.embryoFolders.sort(key=os.path.getctime) #============================================================================== # Get image #==============================================================================", "of ROI if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [np.nan, np.nan,", "#Adjust size of Table self.diag.table.resizeRowsToContents() #============================================================================== # Update the user", "frame if self.eggInt != 1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal =", "(im.max()-im.min())/255. 
out = (im/ran) out = out-out.min() self.compSeq[int(f)] = out.astype(np.uint8)", "from the dataHandling class #============================================================================== def formatSequence(self,ims): # Format seq", "self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo) #self.diag.keyPressEvent(self.keyPressEvent) #============================================================================== # Generate data for populating the embryo/approveROI", "in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) self.diag.table.setItem(m, n, newitem) newitem.setBackground(self.tableCols[m]) #Add", "ROI, using data from OpenCV. #============================================================================== def createOpenCVEggROI(self): # Get", "top most sorted corner points bottomMost = ySorted[:2, :] topMost", "dataForTable(self): self.tableData = {'Embryo':list(self.embryoLabels), 'ROI approved':['No'] * len(list(self.embryoLabels))} self.tableCols =", "anchor to calculate the Euclidean distance between the # The", "# self.roi.addScaleHandle([1, 1], [0, 0]) # self.roi.addScaleHandle([0, 0], [1, 1])", "# Copy ROI on button click. #============================================================================== def applyCopiedROI(self): self.getSeqValsAndCurrROI()", "self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted']) # Sets different alignment data just on the", "# Enter data onto Table horHeaders = [] for n,", "= self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.eggRotBBox[0,self.intDivVal] = self.originalEggRotBBox self.eggBoxPoints[0,self.intDivVal] =", "position and ROI. 
self.getSeqValsAndCurrROI() if (self.currROI_eggRotBBox[0] != 'nan'): # 0", "= -89 # Get rotated bounding box points ySorted =", "self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = self.originalEggBoxPoints else: self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal)", "with modifications to roi self.updatedEggROI=[] ### Still to do... self.diag.imv.addItem(self.roi)", "print 'no angle' else: # Random angle ROIs self.roi =", "self.intDivVal = int(self.divVal) self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.currROI_eggRotBBox =", "click. #============================================================================== def cpROI(self): self.originalEggRotBBox = self.currROI_eggRotBBox self.originalEggBoxPoints = self.currROI_eggBoxPoints", "= 'Approved' self.tableCols[self.diag.table.currentRow()] = QtGui.QColor(0,100,0,120) horHeaders = [] for n,", "= int(self.divVal) self.currROI_eggRotBBox = self.eggRotBBox[0,self.intDivVal] self.currROI_eggBoxPoints = self.eggBoxPoints[0,self.intDivVal] #============================================================================== #", "def closeEvent(self, event): # # quit_msg = \"Are you sure", "parentPath, vidTime self.diag.setWindowTitle('Identify eggs') self.diag.imv = pg.ImageView() self.btn_save = QPushButton('Save',", "eggBBAng)) # Otherwise just save simply else: self.eggRotBBox[0,self.intDivVal] = [eggBBX,", "of the X size # Rectangular ROI used to enable", "'roi')): self.diag.imv.removeItem(self.roi) # Get relevant video position and ROI. 
self.getSeqValsAndCurrROI()", "position and ROI self.getSeqValsAndCurrROI() # 0 or 90 degree angles", "topMost, \"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1], :] self.roi = pg.ROI([bl[0],", "elif self.currROI_eggRotBBox[4] == -0: #self.currROI_eggRotBBox[4] = -1 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:,", "eggBBAng] self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) # Otherwise", "# Deal with stretching for approrpraite formatting. checkLayout.setColumnStretch(0, 3) checkLayout.setColumnStretch(1,", "appropriately for pyqtgraph ROIs self.tSeqd = np.zeros_like(ims) for l in", "self.eggRotBBox[self.intDivVal,self.withinSeqVal] self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] else: self.divVal = self.diag.imv.currentIndex self.intDivVal =", "Copy ROI on button click. #============================================================================== def applyCopiedROI(self): self.getSeqValsAndCurrROI() #", "pd from PyQt5.Qt import * import pyqtgraph as pg #from", "ROI. self.getSeqValsAndCurrROI() # 0 or 90 degree angles seem very", "import distance as dist import glob import re import os", "self) #============================================================================== # #============================================================================== def showUI(self,ims,eggRotBBox, eggBoxPoints, embryoLabels, eggInt): self.eggInt", "the ROI to ImageItem #self.diag.imv.addItem(self.roi) #============================================================================== # Deal with data", "and ROI. 
# self.getSeqValsAndCurrROI() # # Get rotated bounding box", "= QGridLayout() self.btnLayout.addWidget(self.cpROI_btn,0,0) self.btnLayout.addWidget(self.useCpROI_btn,0,1) self.btnLayout.addWidget(self.noEgg_btn,1,1) self.btnLayout.addWidget(self.approveROI_btn,1,0) # Exit button not", "= pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]],", "eggBBY), (eggBBW, eggBBH), eggBBAng)) # Otherwise just save simply else:", "== -90.0) | (self.originalEggRotBBox[4] == -0.0)| (self.originalEggRotBBox[4] == 0.0): #", "# Assign nan to current ROI if 'No Egg' button", "= bottomMost # # Use bottom-left coordinate as anchor to", "if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get relevant video position and", "self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # self.roi.setAngle(self.originalEggRotBBox[4], update=True) #", "most # bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :] # (bl, br)", "= int(self.divVal) self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.eggRotBBox[self.intDivVal,self.withinSeqVal] = self.originalEggRotBBox", "l in range(len(self.tSeqd)): self.tSeqd[l] = ims[l].T #============================================================================== # Get folders", "# Remove old ROI if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get", "widget self.diag.imv.ui.roiBtn.hide() self.diag.imv.ui.menuBtn.hide() # Make tableview self.diag.table = QtGui.QTableWidget() self.diag.table.setShowGrid(True)", "[-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0,", "# roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], 
eggRotBBox[vidTime][3]]) # # Add", "eggs') # Make ImageView self.diag.imv = pg.ImageView() self.diag.resize(1000,600) # Make", "# self.getSeqValsAndCurrROI() # # Get rotated bounding box points #", "\"euclidean\")[0] # (tl, tr) = topMost[np.argsort(D)[::-1], :] # # Make", "cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) # Otherwise just save simply", "self.roi.addRotateHandle([1, 0],[0.5,0.5]) # self.roi.addRotateHandle([0, 1], [0.5,0.5]) # self.roi.addScaleHandle([1, 1], [0,", "pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -180: #self.currROI_eggRotBBox[4] =", "* #%% class eggUI(QDialog): ''' createOpenCVEggROI : take eggID defined", "Connect changes in timeline so correct ROI is created and", "self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal = int(self.divVal) self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.currROI_eggRotBBox", "function to add data self.dataForTable() # Function for modifying the", "# Get index values for ROI data. 
#============================================================================== def getSeqValsAndCurrROI(self):", "# else: # # Random angle ROIs # self.roi =", "= QtGui.QPushButton('&Copy ROI') self.cpROI_btn.setMinimumHeight(40); self.useCpROI_btn = QtGui.QPushButton('&Use Copied ROI') self.useCpROI_btn.setMinimumHeight(40);", "self.imImport() self.diag.imv.setImage(self.compSeq) # Add the ROI to ImageItem self.diag.show() #", "(self.originalEggRotBBox[4] == -0.0)| (self.originalEggRotBBox[4] == 0.0): # self.roi = pg.ROI([bottomMost[0][0],", "dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1], :] # Make", "self.currROI_eggRotBBox[3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Debug", "# Call function to add data self.dataForTable() # Function for", "Get index values for ROI data. #============================================================================== def getSeqValsAndCurrROI(self): #", "xyPosHandles, ellipse, changeAngle, roiChanges,updatedEggROI, changeX, changeY, changeScaleX, changeScaleY, changeAngle #", "eggRotBBox.copy() self.originalEggBoxPoints = eggBoxPoints.copy() #============================================================================== # Get index values for", "= cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) #============================================================================== # Copy ROI", "#============================================================================== # def updateImage(self): # self.getSeqValsAndCurrROI() # #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] = self.seq", "embryo): self.parentPath = parentPath self.embryo = embryo self.embryoFolders = glob.glob(parentPath", "self.diag.imv.removeItem(self.roi) # # Get relevant video position and ROI. #", "require different of the X size # # Rectangular ROI", "clicked. 
#============================================================================== def updateTable(self): self.tableData['ROI approved'][self.diag.table.currentRow()] = 'Approved' self.tableCols[self.diag.table.currentRow()] =", "the unaltered ROI from OpenCV as a starting point. #if", "to add data self.dataForTable() # Function for modifying the table", "instance. Called from showUI and updateUI. #============================================================================== def importOpenCVROIs(self,eggRotBBox, eggBoxPoints):", "ROI on button click. #============================================================================== def applyCopiedROI(self): self.getSeqValsAndCurrROI() # Store", "showUI(self,ims,eggRotBBox, eggBoxPoints, embryoLabels, eggInt): self.eggInt = eggInt self.embryoLabels = embryoLabels", "for new embryo. #============================================================================== def updateOpenCVEggROINewEmbryo(self): # Remove old ROI", "rotated bounding box points # ySorted = self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]), :]", "= [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[0,self.intDivVal] = cv2.boxPoints(((eggBBX, eggBBY),", "Get folders for a particular embryo #============================================================================== def getEmbryoFolders(self, parentPath,", "Update ROI for new embryo. #============================================================================== def updateOpenCVEggROINewEmbryo(self): # Remove", "point # D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] # (tl, tr)", "the table when ROI is approved. self.approveROI_btn.clicked.connect(self.updateTable) # Copy current", "[self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -180: #self.currROI_eggRotBBox[4] = -179 ySorted", "chagnges. 
# if (self.originalEggRotBBox[4] == -90.0) | (self.originalEggRotBBox[4] == -0.0)|", "self.compSeq[f] = self.compSeq[f].T #============================================================================== # Update image iteratively when slider", "= ySorted[:2, :] # topMost = ySorted[2:, :] # #", "= \"Are you sure you want to exit the program?\"", "self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # Add", "Table self.diag.table.resizeRowsToContents() # self.diag.table.resizeColumnsToContents() #============================================================================== # Update table when approve", "#============================================================================== def recordNoEgg(self): # Remove ROI self.diag.imv.removeItem(self.roi) # Store nans", "# Update ROI. #============================================================================== def updateROI(self): #global vidTime, xyPosHandles, ellipse,", "QtGui.QPushButton('&No Egg') self.noEgg_btn.setMinimumHeight(40); self.approveROI_btn = QtGui.QPushButton('&Approve ROIs') self.approveROI_btn.setMinimumHeight(40); self.exit_btn =", "self.originalEggRotBBox self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = self.originalEggBoxPoints else: self.divVal = self.diag.imv.currentIndex self.intDivVal =", "pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0,", "self.eggBoxPoints[0,self.intDivVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) #============================================================================== # Copy", "#self.currROI_eggRotBBox[4] = -1 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get", "take eggID defined ROIs and visualise ''' sliderUpdate = QtCore.pyqtSignal()", 
"per sequence.. if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [eggBBX, eggBBY,", "m, item in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) newitem.setBackground(QtGui.QColor(0,0,100,120)) self.diag.table.setItem(m, n,", "self.approveROI_btn.setMinimumHeight(40); self.exit_btn = QtGui.QPushButton('Exit') self.exit_btn.setMinimumHeight(40); # Make button layout self.btnLayout", "h in hh] # Handle on each corner. Get handle", "tableview self.diag.table = QtGui.QTableWidget() self.diag.table.setShowGrid(True) self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted']) # Sets different", "import cv2 import pandas as pd from PyQt5.Qt import *", "keyPressed = QtCore.pyqtSignal() def __init__(self, parent=None): super(eggUI, self).__init__(parent) # Make", "= QtGui.QPushButton('&Approve ROIs') self.approveROI_btn.setMinimumHeight(40); self.exit_btn = QtGui.QPushButton('Exit') self.exit_btn.setMinimumHeight(40); # Make", "getEmbryoFolders(self, parentPath, embryo): self.parentPath = parentPath self.embryo = embryo self.embryoFolders", "egg self.noEgg_btn.clicked.connect(self.recordNoEgg) # Exit - prompt user to confirm #self.exit_btn.clicked.connect(self.closeEvent)", "= [np.nan, np.nan, np.nan, np.nan, np.nan] #============================================================================== # Copy ROI", "= ySorted[:2, :] topMost = ySorted[2:, :] # Get bottom", "(tl, tr) = topMost[np.argsort(D)[::-1], :] # # Make ROI -", "# Get folders for a particular embryo #============================================================================== def getEmbryoFolders(self,", "self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -180: #self.currROI_eggRotBBox[4] = -179 ySorted =", "else: # Random angle ROIs self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2],", "Debug # print 'no angle' else: # Random angle ROIs", "checkLayout = QGridLayout() # Deal with stretching for approrpraite 
formatting.", "moved #============================================================================== #============================================================================== # def updateImage(self): # self.getSeqValsAndCurrROI() # #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))]", "\"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1], :] # Make ROI -", "# # Get relevant video position and ROI. # self.getSeqValsAndCurrROI()", "from OpenCV as a starting point. #if len(self.updatedEggROI) == 0:", "self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -180:", "Otherwise just save simply else: self.eggRotBBox[0,self.intDivVal] = [eggBBX, eggBBY, eggBBW,", "-0.0)| (self.currROI_eggRotBBox[4] == 0.0): self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])", "def applyCopiedROI(self): self.getSeqValsAndCurrROI() # Store copied ROI to embryo sequence", "== 0.0): self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) # roi", "slider moved #============================================================================== #============================================================================== # def updateImage(self): # self.getSeqValsAndCurrROI() #", "previously updated or taking the unaltered ROI from OpenCV as", "newitem = QtGui.QTableWidgetItem(item) newitem.setBackground(QtGui.QColor(0,0,100,120)) self.diag.table.setItem(m, n, newitem) # Add Header", "else: # event.ignore() # #============================================================================== #============================================================================== # #self.originalEggRotBBox = eggRotBBox.copy()", "self.roi.invertible = 'True' # # Make var for dealing with", "# Use bottom-left coordinate as anchor to calculate the Euclidean", "Generate data for populating the embryo/approveROI table. 
#============================================================================== def dataForTable(self):", "topMost = ySorted[2:, :] # Get bottom most bottomMost =", "eggBBH, eggBBAng] self.eggBoxPoints[0,self.intDivVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) #==============================================================================", "#self.diag.keyPressEvent(self.keyPressEvent) #============================================================================== # Generate data for populating the embryo/approveROI table.", "changeScaleY = roiChanges.getScale()[1] changeAngle = roiChanges.getAngle() # Update ROI, either", "eggBBH), eggBBAng = cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32) ) if eggBBAng == -90:", "Call function to add data self.dataForTable() # Function for modifying", "== 0: self.updatedEggROI = (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle) #else: #self.updatedEggROI = (((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle) hh", "len(self.updatedEggROI) == 0: self.updatedEggROI = (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle) #else: #self.updatedEggROI = 
(((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle)", "tr) = topMost[np.argsort(D)[::-1], :] self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]])", "### Still to do... self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI", "Generate a pyqtgraph ROI, using data from OpenCV. #============================================================================== def", "roi self.updatedEggROI=[] self.roi.sigRegionChangeFinished.connect(self.updateROI) #else: #============================================================================== # Update the ROI for", "the largest distance will be our bottom-right point D =", "-90.0) | (self.currROI_eggRotBBox[4] == -0.0)| (self.currROI_eggRotBBox[4] == 0.0): self.roi =", ":] self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) else: # Get", "Assign nan to current ROI if 'No Egg' button clicked", "# Otherwise just save simply else: self.eggRotBBox[0,self.intDivVal] = [eggBBX, eggBBY,", "#============================================================================== # Update the ROI for current embryo. #============================================================================== def", "Remove old ROI if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get relevant", "ROIs if self.eggInt != 1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal =", "to do... 
self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI for new", "self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] else: self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal)", "self.diag.imv.removeItem(self.roi) # Get relevant video position and ROI self.getSeqValsAndCurrROI() #", "# self.roi.addRotateHandle([0, 1], [0.5,0.5]) # self.roi.addScaleHandle([1, 1], [0, 0]) #", "save simply else: self.eggRotBBox[0,self.intDivVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng]", "var for dealing with modifications to roi self.updatedEggROI=[] ### Still", "eggBBAng)) #============================================================================== # Copy ROI on button click. #============================================================================== def", "just save simply else: self.eggRotBBox[0,self.intDivVal] = [eggBBX, eggBBY, eggBBW, eggBBH,", "angle ROIs self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True)", "button not implemented, just use window x (topRight). # self.btnLayout.addWidget(self.exit_btn,2,1)", "or taking the unaltered ROI from OpenCV as a starting", "im = cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH) ran = (im.max()-im.min())/255. 
out = (im/ran) out", ":] # Make ROI - note non 0,or 90 degree", "# print 'no angle' else: # Random angle ROIs self.roi", "1) checkLayout.setRowStretch(0, 1) checkLayout.setRowStretch(1, 3) # Add to layout checkLayout.addWidget(self.diag.imv,0,0,2,2)", "eggBBAng = cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32) ) if eggBBAng == -90: eggBBAng", "roiChanges,updatedEggROI, changeX, changeY, changeScaleX, changeScaleY, changeAngle # Get changes to", "# #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] = self.seq # #self.UI.comp(self.imImport(self.diag.imv.currentIndex())) # im = cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH)", "self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1],", "= parentPath self.embryo = embryo self.embryoFolders = glob.glob(parentPath + \"*/\"", "self.embryoFolders.sort(key=os.path.getctime) #============================================================================== # Get image #============================================================================== def imImport(self): for f", "pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2],", "# self.diag.imv.setImage(self.compSeq.T) # self.diag.imv.show() # #======== #============================================================================== #============================================================================== # ROI", "!= 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [np.nan, np.nan, np.nan, np.nan, np.nan] self.eggBoxPoints[0,self.intDivVal]", "the # The point with the largest distance will be", "nans in place of ROI if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal]", "self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # Add handles 
self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1], [0.5,0.5])", "n, newitem) # Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) # Adjust size of", "(self.originalEggRotBBox[4] == 0.0): # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [self.originalEggRotBBox[2], self.originalEggRotBBox[3]])", "self.roi.addScaleHandle([1, 1], [0, 0]) # self.roi.addScaleHandle([0, 0], [1, 1]) #", "Random angle ROIs self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4],", "# D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] # (tl, tr) =", "Make ROI self.importOpenCVROIs(eggRotBBox, eggBoxPoints) if (eggRotBBox[0][0][0] != 'nan'): self.createOpenCVEggROI() self.diag.imv.addItem(self.roi)", "(bl, br) = bottomMost # Use bottom-left coordinate as anchor", "self.roi.setAngle(self.originalEggRotBBox[4], update=True) # # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]])", "[-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1], [0.5,0.5])", "interface #============================================================================== def updateUI(self,ims,eggRotBBox, eggBoxPoints): self.imImport() self.diag.imv.setImage(self.compSeq) self.importOpenCVROIs(eggRotBBox, eggBoxPoints) self.getSeqValsAndCurrROI()", "embryo #============================================================================== def getEmbryoFolders(self, parentPath, embryo): self.parentPath = parentPath self.embryo", "eggID per sequence.. 
if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [eggBBX,", "= [np.nan, np.nan, np.nan, np.nan, np.nan] self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] else:", "update=True) # Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1], [0.5,0.5]) self.roi.addScaleHandle([1,", "self.formatSequence(ims) self.imImport() self.diag.imv.setImage(self.compSeq) # Add the ROI to ImageItem self.diag.show()", "= -1 # Save updated # If more than one", "[1, 1]) self.roi.setPen('y',width=3) self.roi.removable self.roi.invertible = 'True' # Make var", "new embryo. #============================================================================== def updateOpenCVEggROINewEmbryo(self): # Remove old ROI if", "def createOpenCVEggROI(self): # Get relevant sequence position and ROI. self.getSeqValsAndCurrROI()", "= -roiChanges.getTranslation()[0] changeY = roiChanges.getTranslation()[1] changeScaleX = roiChanges.getScale()[0] changeScaleY =", "eggBoxPoints self.originalEggRotBBox = eggRotBBox.copy() self.originalEggBoxPoints = eggBoxPoints.copy() #============================================================================== # Get", "ROI from OpenCV as a starting point. #if len(self.updatedEggROI) ==", "#============================================================================== # #============================================================================== #============================================================================== # Close button - not implemented", "tracking user chagnges. 
# if (self.originalEggRotBBox[4] == -90.0) | (self.originalEggRotBBox[4]", "[eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # else: # # Random angle ROIs #", "#============================================================================== def cpROI(self): self.originalEggRotBBox = self.currROI_eggRotBBox self.originalEggBoxPoints = self.currROI_eggBoxPoints #==============================================================================", "Still to do... self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI for", "if (self.originalEggRotBBox[4] == -90.0) | (self.originalEggRotBBox[4] == -0.0)| (self.originalEggRotBBox[4] ==", "-90.0) | (self.originalEggRotBBox[4] == -0.0)| (self.originalEggRotBBox[4] == 0.0): # self.roi", "# Make tableview self.diag.table = QtGui.QTableWidget() self.diag.table.setShowGrid(True) self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted']) #", "# Add to layout checkLayout.addWidget(self.diag.imv,0,0,2,2) checkLayout.addWidget(self.diag.table,1,5) # Apply layout self.diag.setLayout(checkLayout)", "#self.currROI_eggRotBBox[4] = -89 # Get rotated bounding box points ySorted", "Called from showUI and updateUI. 
#============================================================================== def importOpenCVROIs(self,eggRotBBox, eggBoxPoints): self.eggRotBBox", "# self.roi.removable # self.roi.invertible = 'True' # # Make var", "QtGui.QPushButton('Exit') self.exit_btn.setMinimumHeight(40); # Make button layout self.btnLayout = QGridLayout() self.btnLayout.addWidget(self.cpROI_btn,0,0)", "QtCore.pyqtSignal() keyPressed = QtCore.pyqtSignal() def __init__(self, parent=None): super(eggUI, self).__init__(parent) #", "# self.diag.table.resizeColumnsToContents() #============================================================================== # Update table when approve ROI button", "self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.currROI_eggRotBBox = self.eggRotBBox[0,self.intDivVal] self.currROI_eggBoxPoints", "most, and top most sorted corner points # bottomMost =", "= QtGui.QColor(0,100,0,120) horHeaders = [] for n, key in enumerate(sorted(self.tableData.keys())):", "self.roi.sigRegionChangeFinished.connect(self.updateROI) #else: #============================================================================== # Update the ROI for current embryo.", "the embryo/approveROI table. #============================================================================== def dataForTable(self): self.tableData = {'Embryo':list(self.embryoLabels), 'ROI", "vidTime, xyPosHandles, ellipse, changeAngle, roiChanges,updatedEggROI, changeX, changeY, changeScaleX, changeScaleY, changeAngle", "self.diag.imv = pg.ImageView() self.diag.resize(1000,600) # Make ROI self.importOpenCVROIs(eggRotBBox, eggBoxPoints) if", "want to exit the program?\" # reply = QtGui.QMessageBox.question(self, 'Message',", "= pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -0: #self.currROI_eggRotBBox[4]", "ROI. 
self.getSeqValsAndCurrROI() if (self.currROI_eggRotBBox[0] != 'nan'): # 0 or 90", "Enter data onto Table horHeaders = [] for n, key", "x (topRight). # self.btnLayout.addWidget(self.exit_btn,2,1) # Add button layout to GridLayout.", "# Add the ROI to ImageItem self.diag.show() # Call function", "self.imImport() self.diag.imv.setImage(self.compSeq) self.importOpenCVROIs(eggRotBBox, eggBoxPoints) self.getSeqValsAndCurrROI() self.updateOpenCVEggROINewEmbryo() # Add the ROI", "ROI self.getSeqValsAndCurrROI() # 0 or 90 degree angles seem very", "# roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Add handles", "(bl, br) = bottomMost # # Use bottom-left coordinate as", "current embryo. #============================================================================== def updateOpenCVEggROICurrEmbryo(self): # Remove previous if (hasattr(self,", "changes to ROI scale, angle and position roiChanges = self.roi.getGlobalTransform()", "on the first column self.diag.table.setRowCount(int(len(self.embryoLabels))) self.diag.table.setColumnCount(2) # Highlight first row", "or 90 degree angles seem very buggy. 
Shift to 1", "to ImageItem #self.diag.imv.addItem(self.roi) #============================================================================== # Deal with data from the", "self.getSeqValsAndCurrROI() # #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] = self.seq # #self.UI.comp(self.imImport(self.diag.imv.currentIndex())) # im =", "(((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle) #else: #self.updatedEggROI = (((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle) hh = self.roi.getHandles() hh =", "pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # roi = pg.EllipseROI([bottomMost[0][0],", "self.updatedEggROI = (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle) #else: #self.updatedEggROI = (((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle) hh = self.roi.getHandles()", "# Import OpenCV determined ROIs from dataHandling instance. 
Called from", "changeAngle = roiChanges.getAngle() # Update ROI, either updating the previously", "one frame eggID per sequence.. if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal]", "self.diag.setLayout(checkLayout) # Make buttons self.cpROI_btn = QtGui.QPushButton('&Copy ROI') self.cpROI_btn.setMinimumHeight(40); self.useCpROI_btn", "#============================================================================== def importOpenCVROIs(self,eggRotBBox, eggBoxPoints): self.eggRotBBox = eggRotBBox self.eggBoxPoints = eggBoxPoints", "= eggRotBBox.copy() self.originalEggBoxPoints = eggBoxPoints.copy() #============================================================================== # Get index values", "br) = bottomMost # Use bottom-left coordinate as anchor to", ":] self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] ==", "ROI used to enable more easy handling of corner handles", "self.diag = QtGui.QDialog() global parentPath, vidTime self.diag.setWindowTitle('Identify eggs') self.diag.imv =", "| (self.currROI_eggRotBBox[4] == -0.0)| (self.currROI_eggRotBBox[4] == 0.0): self.roi = pg.ROI([bl[0],", "eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[0,self.intDivVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng))", "# if (self.originalEggRotBBox[4] == -90.0) | (self.originalEggRotBBox[4] == -0.0)| (self.originalEggRotBBox[4]", "self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) #", "out = out-out.min() self.compSeq[int(f)] = out.astype(np.uint8) self.compSeq[f] = self.compSeq[f].T #==============================================================================", "self.updateOpenCVEggROICurrEmbryo() #============================================================================== # 
#============================================================================== #============================================================================== # Close button - not", "Exit button not implemented, just use window x (topRight). #", "# # if reply == QtGui.QMessageBox.Yes: # #event.accept() # app.quit()", "self.intDivVal = int(self.divVal) self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.eggRotBBox[self.intDivVal,self.withinSeqVal] =", "event): # # quit_msg = \"Are you sure you want", "angles, require different of the X size # # Rectangular", "checkLayout.setColumnStretch(0, 3) checkLayout.setColumnStretch(1, 1) checkLayout.setRowStretch(0, 1) checkLayout.setRowStretch(1, 3) # Add", "from ImageView widget self.diag.imv.ui.roiBtn.hide() self.diag.imv.ui.menuBtn.hide() # Make tableview self.diag.table =", "# im = cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH) # ran = (im.max()-im.min())/255. # out", "tr) = topMost[np.argsort(D)[::-1], :] # Make ROI - note non", "from PyQt5.Qt import * import pyqtgraph as pg #from PyQt4.Qt", "[eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW,", "and ROI self.getSeqValsAndCurrROI() # 0 or 90 degree angles seem", "= self.eggRotBBox[self.intDivVal,self.withinSeqVal] self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] else: self.divVal = self.diag.imv.currentIndex self.intDivVal", "'No Egg' button clicked #============================================================================== def recordNoEgg(self): # Remove ROI", "do... self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI. 
#============================================================================== def updateROI(self):", "[self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos()) for h in hh] # Handle on each", "Get relevant video position and ROI. self.getSeqValsAndCurrROI() # 0 or", "out-out.min() # self.compSeq[self.diag.imv.currentIndex] = out.astype(np.uint8) # self.diag.imv.setImage(self.compSeq.T) # self.diag.imv.show() #", "in range(len(self.eggUIimPaths)): im = cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH) ran = (im.max()-im.min())/255. out =", "self.originalEggBoxPoints = self.currROI_eggBoxPoints #============================================================================== # Assign nan to current ROI", "modifications to roi self.updatedEggROI=[] self.roi.sigRegionChangeFinished.connect(self.updateROI) #else: #============================================================================== # Update the", "# Get rotated bounding box points # ySorted = self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:,", "to roi self.updatedEggROI=[] self.roi.sigRegionChangeFinished.connect(self.updateROI) #else: #============================================================================== # Update the ROI", "== -90: #self.currROI_eggRotBBox[4] = -89 # Get rotated bounding box", "self.roi.setPen('y',width=3) # self.roi.removable # self.roi.invertible = 'True' # # Make", "# Get relevant video position and ROI self.getSeqValsAndCurrROI() # 0", "bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]],", "self.btnLayout.addWidget(self.cpROI_btn,0,0) self.btnLayout.addWidget(self.useCpROI_btn,0,1) self.btnLayout.addWidget(self.noEgg_btn,1,1) self.btnLayout.addWidget(self.approveROI_btn,1,0) # Exit button not implemented, just", "and updateUI. 
#============================================================================== def importOpenCVROIs(self,eggRotBBox, eggBoxPoints): self.eggRotBBox = eggRotBBox self.eggBoxPoints", "Format seq appropriately for pyqtgraph ROIs self.tSeqd = np.zeros_like(ims) for", "= out-out.min() # self.compSeq[self.diag.imv.currentIndex] = out.astype(np.uint8) # self.diag.imv.setImage(self.compSeq.T) # self.diag.imv.show()", "self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] else: self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] self.eggRotBBox[0,self.intDivVal] = [np.nan,", "self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) # roi = pg.EllipseROI([bottomMost[0][0],", "frame eggID per sequence.. if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] =", "data. #============================================================================== def getSeqValsAndCurrROI(self): # Calculate the indices for current", "ROI for current embryo. #============================================================================== def updateOpenCVEggROICurrEmbryo(self): # Remove previous", "# Apply layout self.diag.setLayout(checkLayout) # Make buttons self.cpROI_btn = QtGui.QPushButton('&Copy", "# if reply == QtGui.QMessageBox.Yes: # #event.accept() # app.quit() #", "Exit - prompt user to confirm #self.exit_btn.clicked.connect(self.closeEvent) # Connect changes", "# ran = (im.max()-im.min())/255. 
# out = (im/ran) # out", "= QtGui.QPushButton('Exit') self.exit_btn.setMinimumHeight(40); # Make button layout self.btnLayout = QGridLayout()", "# Get changes to ROI scale, angle and position roiChanges", "import sys import cv2 import pandas as pd from PyQt5.Qt", "# Remove previous if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get relevant", "Use bottom-left coordinate as anchor to calculate the Euclidean distance", "= self.roi.getHandles() hh = [self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos()) for h in hh]", "(hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get relevant video position and ROI.", "= (im/ran) out = out-out.min() self.compSeq[int(f)] = out.astype(np.uint8) self.compSeq[f] =", "self.eggRotBBox[0,self.intDivVal] = [np.nan, np.nan, np.nan, np.nan, np.nan] #============================================================================== # Copy", "#============================================================================== #============================================================================== # #self.originalEggRotBBox = eggRotBBox.copy() # #self.originalEggBoxPoints = eggBoxPoints.copy()", "= self.compSeq[f].T #============================================================================== # Update image iteratively when slider moved", "# self.btnLayout.addWidget(self.exit_btn,2,1) # Add button layout to GridLayout. checkLayout.addLayout(self.btnLayout,0,5) #", "embryo +\"/\") self.embryoFolders.sort(key=os.path.getctime) #============================================================================== # Get image #============================================================================== def imImport(self):", "Add button layout to GridLayout. 
checkLayout.addLayout(self.btnLayout,0,5) # Format images for", "self.compSeq[int(f)] = out.astype(np.uint8) self.compSeq[f] = self.compSeq[f].T #============================================================================== # Update image", "self.roi.addRotateHandle([0, 1], [0.5,0.5]) self.roi.addScaleHandle([1, 1], [0, 0]) self.roi.addScaleHandle([0, 0], [1,", "angle' else: # Random angle ROIs self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]],", "ROI if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get relevant video position", "importOpenCVROIs(self,eggRotBBox, eggBoxPoints): self.eggRotBBox = eggRotBBox self.eggBoxPoints = eggBoxPoints self.originalEggRotBBox =", "corner. Get handle positions self.xyPosHandles =[] for h in hh:", "for a particular embryo #============================================================================== def getEmbryoFolders(self, parentPath, embryo): self.parentPath", "checkLayout.setColumnStretch(1, 1) checkLayout.setRowStretch(0, 1) checkLayout.setRowStretch(1, 3) # Add to layout", "parent=None): super(eggUI, self).__init__(parent) # Make QDialog self.diag = QtGui.QDialog() global", "self.diag.table.setRowCount(int(len(self.embryoLabels))) self.diag.table.setColumnCount(2) # Highlight first row self.diag.table.selectRow(0) # Make layout", "#self.diag.imv.addItem(self.roi) #============================================================================== # Deal with data from the dataHandling class", "bottomMost[np.argsort(bottomMost[:, 1]), :] # (bl, br) = bottomMost # #", "unaltered ROI from OpenCV as a starting point. 
#if len(self.updatedEggROI)", "#self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] = self.seq # #self.UI.comp(self.imImport(self.diag.imv.currentIndex())) # im = cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH) #", "size # # Rectangular ROI used to enable more easy", "# # quit_msg = \"Are you sure you want to", "len(list(self.embryoLabels))} self.tableCols = [QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels)) # Enter data onto Table", "Update the ROI for current embryo. #============================================================================== def updateOpenCVEggROICurrEmbryo(self): #", "pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -0: #self.currROI_eggRotBBox[4] =", "# Format images for pyqtgraph and put in ImageView #", ":] # Get bottom most bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :]", "= QtGui.QTableWidgetItem(item) self.diag.table.setItem(m, n, newitem) newitem.setBackground(self.tableCols[m]) #Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) #Adjust", "# If more than one frame eggID per sequence.. if", "ROIs self.tSeqd = np.zeros_like(ims) for l in range(len(self.tSeqd)): self.tSeqd[l] =", "Egg') self.noEgg_btn.setMinimumHeight(40); self.approveROI_btn = QtGui.QPushButton('&Approve ROIs') self.approveROI_btn.setMinimumHeight(40); self.exit_btn = QtGui.QPushButton('Exit')", "Update table when approve ROI button clicked. #============================================================================== def updateTable(self):", "Function for modifying the table when ROI is approved. 
self.approveROI_btn.clicked.connect(self.updateTable)", "our bottom-right point D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] (tl, tr)", "handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1], [0.5,0.5]) self.roi.addScaleHandle([1, 1], [0, 0])", "numpy as np from scipy.spatial import distance as dist import", "degree angles, require different of the X size # #", "# ROI functions #============================================================================== #============================================================================== # Import OpenCV determined ROIs", "QtGui.QColor(0,100,0,120) horHeaders = [] for n, key in enumerate(sorted(self.tableData.keys())): horHeaders.append(key)", "QtGui.QPushButton('&Copy ROI') self.cpROI_btn.setMinimumHeight(40); self.useCpROI_btn = QtGui.QPushButton('&Use Copied ROI') self.useCpROI_btn.setMinimumHeight(40); self.noEgg_btn", "newitem.setBackground(self.tableCols[m]) #Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) #Adjust size of Table self.diag.table.resizeRowsToContents() #==============================================================================", "most sorted corner points # bottomMost = ySorted[:2, :] #", "self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # # roi =", "Copy current ROI self.cpROI_btn.clicked.connect(self.cpROI) # Apply copied ROI self.useCpROI_btn.clicked.connect(self.applyCopiedROI) #", "ySorted = self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]), :] # # Get bottom most,", "#============================================================================== #============================================================================== # ROI functions #============================================================================== #============================================================================== # Import OpenCV", "# Add the ROI to ImageItem 
#self.diag.imv.addItem(self.roi) #============================================================================== # Deal", "self.originalEggBoxPoints self.updateOpenCVEggROICurrEmbryo() #============================================================================== # #============================================================================== #============================================================================== # Close button -", "to current ROI if 'No Egg' button clicked #============================================================================== def", "# #event.accept() # app.quit() # else: # event.ignore() # #==============================================================================", "= self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get bottom most, and top", "[0, 0]) # self.roi.addScaleHandle([0, 0], [1, 1]) # self.roi.setPen('y',width=3) #", "top most sorted corner points # bottomMost = ySorted[:2, :]", "= -179 elif eggBBAng == -0: eggBBAng = -1 #", "= QtGui.QDialog() global parentPath, vidTime self.diag.setWindowTitle('Identify eggs') self.diag.imv = pg.ImageView()", "[self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -0: #self.currROI_eggRotBBox[4] = -1 ySorted", "'Sorted']) # Sets different alignment data just on the first", "= roiChanges.getTranslation()[1] changeScaleX = roiChanges.getScale()[0] changeScaleY = roiChanges.getScale()[1] changeAngle =", "range(len(self.eggUIimPaths)): im = cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH) ran = (im.max()-im.min())/255. out = (im/ran)", "[self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]])", "self.exit_btn = QtGui.QPushButton('Exit') self.exit_btn.setMinimumHeight(40); # Make button layout self.btnLayout =", "when ROI is approved. 
self.approveROI_btn.clicked.connect(self.updateTable) # Copy current ROI self.cpROI_btn.clicked.connect(self.cpROI)", "self.diag.imv.removeItem(self.roi) # Store nans in place of ROI if self.eggInt", "Update ROI, either updating the previously updated or taking the", "= eggBoxPoints.copy() #============================================================================== # Get index values for ROI data.", "#============================================================================== def dataForTable(self): self.tableData = {'Embryo':list(self.embryoLabels), 'ROI approved':['No'] * len(list(self.embryoLabels))}", "== -90: eggBBAng = -89 elif eggBBAng == -180: eggBBAng", "roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # roi = pg.EllipseROI([bottomMost[0][0],", "eggBoxPoints): self.eggRotBBox = eggRotBBox self.eggBoxPoints = eggBoxPoints self.originalEggRotBBox = eggRotBBox.copy()", "#============================================================================== # Update ROI for new embryo. 
#============================================================================== def updateOpenCVEggROINewEmbryo(self):", "= QtCore.pyqtSignal() def __init__(self, parent=None): super(eggUI, self).__init__(parent) # Make QDialog", "topMost, \"euclidean\")[0] # (tl, tr) = topMost[np.argsort(D)[::-1], :] # #", "QtGui.QMessageBox.Yes: # #event.accept() # app.quit() # else: # event.ignore() #", "= eggBoxPoints self.originalEggRotBBox = eggRotBBox.copy() self.originalEggBoxPoints = eggBoxPoints.copy() #============================================================================== #", "updateOpenCVEggROICurrEmbryo(self): # Remove previous if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get", "self.diag.table = QtGui.QTableWidget() self.diag.table.setShowGrid(True) self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted']) # Sets different alignment", "key in enumerate(sorted(self.tableData.keys())): horHeaders.append(key) for m, item in enumerate(self.tableData[key]): newitem", "formatting. 
checkLayout.setColumnStretch(0, 3) checkLayout.setColumnStretch(1, 1) checkLayout.setRowStretch(0, 1) checkLayout.setRowStretch(1, 3) #", "reply = QtGui.QMessageBox.question(self, 'Message', # quit_msg, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) # #", "alignment data just on the first column self.diag.table.setRowCount(int(len(self.embryoLabels))) self.diag.table.setColumnCount(2) #", "# Deal with data from the dataHandling class #============================================================================== def", "Assign nan to frames not containing egg self.noEgg_btn.clicked.connect(self.recordNoEgg) # Exit", "= eggRotBBox self.eggBoxPoints = eggBoxPoints self.originalEggRotBBox = eggRotBBox.copy() self.originalEggBoxPoints =", "def updateOpenCVEggROINewEmbryo(self): # Remove old ROI if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi)", "ySorted[2:, :] # Get bottom most bottomMost = bottomMost[np.argsort(bottomMost[:, 1]),", "= bottomMost[np.argsort(bottomMost[:, 1]), :] # (bl, br) = bottomMost #", "checkLayout.addWidget(self.diag.imv,0,0,2,2) checkLayout.addWidget(self.diag.table,1,5) # Apply layout self.diag.setLayout(checkLayout) # Make buttons self.cpROI_btn", "(eggRotBBox[0][0][0] != 'nan'): self.createOpenCVEggROI() self.diag.imv.addItem(self.roi) # Remove buttons from ImageView", "self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI. 
#============================================================================== def updateROI(self): #global vidTime,", "ROI functions #============================================================================== #============================================================================== # Import OpenCV determined ROIs from", "Header self.diag.table.setHorizontalHeaderLabels(horHeaders) # Adjust size of Table self.diag.table.resizeRowsToContents() # self.diag.table.resizeColumnsToContents()", "largest distance will be our bottom-right point # D =", "the ROI to ImageItem self.diag.show() # Call function to add", "elif self.currROI_eggRotBBox[4] == -180: #self.currROI_eggRotBBox[4] = -179 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:,", "size of Table self.diag.table.resizeRowsToContents() # self.diag.table.resizeColumnsToContents() #============================================================================== # Update table", "click. #============================================================================== def applyCopiedROI(self): self.getSeqValsAndCurrROI() # Store copied ROI to", "bottomMost # Use bottom-left coordinate as anchor to calculate the", "3) checkLayout.setColumnStretch(1, 1) checkLayout.setRowStretch(0, 1) checkLayout.setRowStretch(1, 3) # Add to", "int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] else:", "self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [np.nan, np.nan, np.nan, np.nan, np.nan]", "#self.originalEggBoxPoints = eggBoxPoints.copy() # #self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] # #self.currROI_eggBoxPoints =", "hh] # Handle on each corner. 
Get handle positions self.xyPosHandles", "eggBBAng = -179 elif eggBBAng == -0: eggBBAng = -1", "[-self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # self.roi.setAngle(self.originalEggRotBBox[4], update=True) # # roi = pg.EllipseROI([bottomMost[0][0],", "Get rotated bounding box points ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :]", "self.intDivVal = int(self.divVal) self.eggRotBBox[0,self.intDivVal] = self.originalEggRotBBox self.eggBoxPoints[0,self.intDivVal] = self.originalEggBoxPoints self.updateOpenCVEggROICurrEmbryo()", "self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.eggRotBBox[self.intDivVal,self.withinSeqVal] = self.originalEggRotBBox self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = self.originalEggBoxPoints else: self.divVal =", "embryo. #============================================================================== def updateOpenCVEggROICurrEmbryo(self): # Remove previous if (hasattr(self, 'roi')):", "self.diag.table.setColumnCount(2) # Highlight first row self.diag.table.selectRow(0) # Make layout checkLayout", "# out = out-out.min() # self.compSeq[self.diag.imv.currentIndex] = out.astype(np.uint8) # self.diag.imv.setImage(self.compSeq.T)", "createOpenCVEggROI(self): # Get relevant sequence position and ROI. 
self.getSeqValsAndCurrROI() if", "else: self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] self.eggRotBBox[0,self.intDivVal] = [np.nan, np.nan, np.nan, np.nan,", "buttons from ImageView widget self.diag.imv.ui.roiBtn.hide() self.diag.imv.ui.menuBtn.hide() # Make tableview self.diag.table", "topMost[np.argsort(D)[::-1], :] # Make ROI - note non 0,or 90", "# # The point with the largest distance will be", "n, newitem) newitem.setBackground(self.tableCols[m]) #Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) #Adjust size of Table", "user to confirm #self.exit_btn.clicked.connect(self.closeEvent) # Connect changes in timeline so", "sorted corner points bottomMost = ySorted[:2, :] topMost = ySorted[2:,", "0], [1, 1]) self.roi.setPen('y',width=3) self.roi.removable self.roi.invertible = 'True' # Make", "!= 1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal = int(self.divVal) self.withinSeqVal =", "Handle on each corner. Get handle positions self.xyPosHandles =[] for", "sequence position and ROI. 
self.getSeqValsAndCurrROI() if (self.currROI_eggRotBBox[0] != 'nan'): #", "'nan'): # 0 or 90 degree angles seem very buggy.", "# #self.originalEggRotBBox = eggRotBBox.copy() # #self.originalEggBoxPoints = eggBoxPoints.copy() # #self.currROI_eggRotBBox", "self.compSeq[self.diag.imv.currentIndex] = out.astype(np.uint8) # self.diag.imv.setImage(self.compSeq.T) # self.diag.imv.show() # #======== #==============================================================================", "reply == QtGui.QMessageBox.Yes: # #event.accept() # app.quit() # else: #", "self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]), :] # # Get bottom most, and top", "# bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :] # (bl, br) =", "updateTable(self): self.tableData['ROI approved'][self.diag.table.currentRow()] = 'Approved' self.tableCols[self.diag.table.currentRow()] = QtGui.QColor(0,100,0,120) horHeaders =", "1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[self.intDivVal,self.withinSeqVal] =", "when slider moved #============================================================================== #============================================================================== # def updateImage(self): # self.getSeqValsAndCurrROI()", "Add to layout checkLayout.addWidget(self.diag.imv,0,0,2,2) checkLayout.addWidget(self.diag.table,1,5) # Apply layout self.diag.setLayout(checkLayout) #", "Remove previous # self.diag.imv.removeItem(self.roi) # # Get relevant video position", "bottomMost[0][1]], [self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2],", "def showUI(self,ims,eggRotBBox, eggBoxPoints, embryoLabels, eggInt): self.eggInt = eggInt self.embryoLabels =", "= ySorted[2:, :] # # Get bottom most # bottomMost", "button layout self.btnLayout = QGridLayout() self.btnLayout.addWidget(self.cpROI_btn,0,0) 
self.btnLayout.addWidget(self.useCpROI_btn,0,1) self.btnLayout.addWidget(self.noEgg_btn,1,1) self.btnLayout.addWidget(self.approveROI_btn,1,0) #", "bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # else: # # Random angle ROIs", "table. #============================================================================== def dataForTable(self): self.tableData = {'Embryo':list(self.embryoLabels), 'ROI approved':['No'] *", "0: self.updatedEggROI = (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle) #else: #self.updatedEggROI = (((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle) hh =", "[0.5,0.5]) # self.roi.addScaleHandle([1, 1], [0, 0]) # self.roi.addScaleHandle([0, 0], [1,", "put in ImageView # self.formatSequence(ims) self.imImport() self.diag.imv.setImage(self.compSeq) # Add the", "(hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get relevant video position and ROI", "sorted corner points # bottomMost = ySorted[:2, :] # topMost", "Format images for pyqtgraph and put in ImageView # self.formatSequence(ims)", "# #============================================================================== #============================================================================== # Close button - not implemented (hidden)", "for tracking user chagnges. 
# if (self.originalEggRotBBox[4] == -90.0) |", "# self.roi.setAngle(self.originalEggRotBBox[4], update=True) # # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2],", "for h in hh: self.xyPosHandles.append([h.x(),h.y()]) (eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng", "enable more easy handling of corner handles for tracking user", "self.eggInt != 1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal = int(self.divVal) self.withinSeqVal", "eggBBH), eggBBAng)) #============================================================================== # Copy ROI on button click. #==============================================================================", "int(self.divVal) self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.eggRotBBox[self.intDivVal,self.withinSeqVal] = self.originalEggRotBBox self.eggBoxPoints[self.intDivVal,self.withinSeqVal]", "changes in timeline so correct ROI is created and displayed.", "bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1],", "bottom-right point D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] (tl, tr) =", "ROI self.importOpenCVROIs(eggRotBBox, eggBoxPoints) if (eggRotBBox[0][0][0] != 'nan'): self.createOpenCVEggROI() self.diag.imv.addItem(self.roi) #", "# self.compSeq[self.diag.imv.currentIndex] = out.astype(np.uint8) # self.diag.imv.setImage(self.compSeq.T) # self.diag.imv.show() # #========", "bottomMost = ySorted[:2, :] # topMost = ySorted[2:, :] #", "to calculate the Euclidean distance between the # # The", "self.roi.getGlobalTransform() changeX = -roiChanges.getTranslation()[0] changeY = roiChanges.getTranslation()[1] changeScaleX = roiChanges.getScale()[0]", "updateImage(self): # self.getSeqValsAndCurrROI() # #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] = self.seq # 
#self.UI.comp(self.imImport(self.diag.imv.currentIndex())) #", "Get bottom most bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :] (bl, br)", "#event.accept() # app.quit() # else: # event.ignore() # #============================================================================== #==============================================================================", "br) = bottomMost # # Use bottom-left coordinate as anchor", "with modifications to roi # self.updatedEggROI=[] # ### Still to", "to ImageItem self.diag.show() # Call function to add data self.dataForTable()", "will be our bottom-right point # D = dist.cdist(bl[np.newaxis], topMost,", "if self.eggInt != 1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal = int(self.divVal)", "Table horHeaders = [] for n, key in enumerate(sorted(self.tableData.keys())): horHeaders.append(key)", "Random angle ROIs # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.originalEggRotBBox[2], self.originalEggRotBBox[3]])", "'Message', # quit_msg, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) # # if reply ==", "eggBoxPoints.copy() # #self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] # #self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] #", "self.useCpROI_btn.setMinimumHeight(40); self.noEgg_btn = QtGui.QPushButton('&No Egg') self.noEgg_btn.setMinimumHeight(40); self.approveROI_btn = QtGui.QPushButton('&Approve ROIs')", "not containing egg self.noEgg_btn.clicked.connect(self.recordNoEgg) # Exit - prompt user to", "data for populating the embryo/approveROI table. 
#============================================================================== def dataForTable(self): self.tableData", "= roiChanges.getScale()[1] changeAngle = roiChanges.getAngle() # Update ROI, either updating", "# Remove ROI self.diag.imv.removeItem(self.roi) # Store nans in place of", "* from PyQt5.QtGui import * import sys import cv2 import", "Make button layout self.btnLayout = QGridLayout() self.btnLayout.addWidget(self.cpROI_btn,0,0) self.btnLayout.addWidget(self.useCpROI_btn,0,1) self.btnLayout.addWidget(self.noEgg_btn,1,1) self.btnLayout.addWidget(self.approveROI_btn,1,0)", "= self.eggRotBBox[0,self.intDivVal] self.currROI_eggBoxPoints = self.eggBoxPoints[0,self.intDivVal] #============================================================================== # Generate a pyqtgraph", "self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -0:", "ROIs from dataHandling instance. Called from showUI and updateUI. #==============================================================================", "import pyqtgraph as pg #from PyQt4.Qt import * #%% class", "# Apply copied ROI self.useCpROI_btn.clicked.connect(self.applyCopiedROI) # Assign nan to frames", "taking the unaltered ROI from OpenCV as a starting point.", "-180: #self.currROI_eggRotBBox[4] = -179 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] #", "bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]])", "ySorted[2:, :] # # Get bottom most # bottomMost =", "video position and ROI. 
# self.getSeqValsAndCurrROI() # # Get rotated", "a particular embryo #============================================================================== def getEmbryoFolders(self, parentPath, embryo): self.parentPath =", "1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [np.nan, np.nan, np.nan, np.nan, np.nan] self.eggBoxPoints[0,self.intDivVal] =", "and position roiChanges = self.roi.getGlobalTransform() changeX = -roiChanges.getTranslation()[0] changeY =", "#============================================================================== # Deal with data from the dataHandling class #==============================================================================", "with the largest distance will be our bottom-right point #", "#self.originalEggRotBBox = eggRotBBox.copy() # #self.originalEggBoxPoints = eggBoxPoints.copy() # #self.currROI_eggRotBBox =", "for approrpraite formatting. checkLayout.setColumnStretch(0, 3) checkLayout.setColumnStretch(1, 1) checkLayout.setRowStretch(0, 1) checkLayout.setRowStretch(1,", "points ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get bottom most,", "#============================================================================== # Update table when approve ROI button clicked. #==============================================================================", "updated # If more than one frame eggID per sequence..", "user chagnges. # if (self.originalEggRotBBox[4] == -90.0) | (self.originalEggRotBBox[4] ==", "def dataForTable(self): self.tableData = {'Embryo':list(self.embryoLabels), 'ROI approved':['No'] * len(list(self.embryoLabels))} self.tableCols", "# Get bottom most bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :] (bl,", "os from PyQt5 import QtGui from PyQt5.QtCore import * from", "enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) self.diag.table.setItem(m, n, newitem) newitem.setBackground(self.tableCols[m]) #Add Header", "when approve ROI button clicked. 
#============================================================================== def updateTable(self): self.tableData['ROI approved'][self.diag.table.currentRow()]", "anchor to calculate the Euclidean distance between the # #", "with stretching for approrpraite formatting. checkLayout.setColumnStretch(0, 3) checkLayout.setColumnStretch(1, 1) checkLayout.setRowStretch(0,", "approved. self.approveROI_btn.clicked.connect(self.updateTable) # Copy current ROI self.cpROI_btn.clicked.connect(self.cpROI) # Apply copied", "seq appropriately for pyqtgraph ROIs self.tSeqd = np.zeros_like(ims) for l", "# self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # # roi", "note non 0,or 90 degree angles, require different of the", "QGridLayout() self.btnLayout.addWidget(self.cpROI_btn,0,0) self.btnLayout.addWidget(self.useCpROI_btn,0,1) self.btnLayout.addWidget(self.noEgg_btn,1,1) self.btnLayout.addWidget(self.approveROI_btn,1,0) # Exit button not implemented,", "[np.nan,np.nan,np.nan,np.nan] self.eggRotBBox[0,self.intDivVal] = [np.nan, np.nan, np.nan, np.nan, np.nan] #============================================================================== #", "different of the X size # Rectangular ROI used to", "D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1], :]", "bounding box points ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get", "= {'Embryo':list(self.embryoLabels), 'ROI approved':['No'] * len(list(self.embryoLabels))} self.tableCols = [QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels))", "OpenCV as a starting point. #if len(self.updatedEggROI) == 0: self.updatedEggROI", "QtCore, QtGui import numpy as np from scipy.spatial import distance", "timeline so correct ROI is created and displayed. 
self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo) #self.diag.keyPressEvent(self.keyPressEvent)", "self.diag.table.setItem(m, n, newitem) newitem.setBackground(self.tableCols[m]) #Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) #Adjust size of", "topMost[np.argsort(D)[::-1], :] self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4]", "QtGui.QMessageBox.question(self, 'Message', # quit_msg, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) # # if reply", "checkLayout.setRowStretch(1, 3) # Add to layout checkLayout.addWidget(self.diag.imv,0,0,2,2) checkLayout.addWidget(self.diag.table,1,5) # Apply", "corner handles for tracking user chagnges. if (self.currROI_eggRotBBox[4] == -90.0)", "ellipse, changeAngle, roiChanges,updatedEggROI, changeX, changeY, changeScaleX, changeScaleY, changeAngle # Get", "Make var for dealing with modifications to roi self.updatedEggROI=[] self.roi.sigRegionChangeFinished.connect(self.updateROI)", "in hh] # Handle on each corner. Get handle positions", "= dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1], :] #", "!= 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[self.intDivVal,self.withinSeqVal]", "self.updatedEggROI=[] ### Still to do... 
self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update", "modifications to roi # self.updatedEggROI=[] # ### Still to do...", "= ySorted[2:, :] # Get bottom most bottomMost = bottomMost[np.argsort(bottomMost[:,", "Apply layout self.diag.setLayout(checkLayout) # Make buttons self.cpROI_btn = QtGui.QPushButton('&Copy ROI')", "class #============================================================================== def formatSequence(self,ims): # Format seq appropriately for pyqtgraph", "Calculate the indices for current frame if self.eggInt != 1234:", "'True' # # Make var for dealing with modifications to", "bottom most, and top most sorted corner points bottomMost =", "changeY, changeScaleX, changeScaleY, changeAngle # Get changes to ROI scale,", "and ROI. self.getSeqValsAndCurrROI() if (self.currROI_eggRotBBox[0] != 'nan'): # 0 or", "dataHandling instance. Called from showUI and updateUI. #============================================================================== def importOpenCVROIs(self,eggRotBBox,", "'nan'): self.createOpenCVEggROI() self.diag.imv.addItem(self.roi) # Remove buttons from ImageView widget self.diag.imv.ui.roiBtn.hide()", "PyQt5.Qt import * import pyqtgraph as pg #from PyQt4.Qt import", "''' createOpenCVEggROI : take eggID defined ROIs and visualise '''", "= pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5])", "0,or 90 degree angles, require different of the X size", "cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH) ran = (im.max()-im.min())/255. out = (im/ran) out = out-out.min()", "1], [0, 0]) # self.roi.addScaleHandle([0, 0], [1, 1]) # self.roi.setPen('y',width=3)", "as pd from PyQt5.Qt import * import pyqtgraph as pg", "from OpenCV. 
#============================================================================== def createOpenCVEggROI(self): # Get relevant sequence position", "\"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1], :] self.roi = pg.ROI([bl[0], bl[1]],", "[np.nan, np.nan, np.nan, np.nan, np.nan] #============================================================================== # Copy ROI on", "ImageView widget self.diag.imv.ui.roiBtn.hide() self.diag.imv.ui.menuBtn.hide() # Make tableview self.diag.table = QtGui.QTableWidget()", "as a bodge fix. if self.currROI_eggRotBBox[4] == -90: #self.currROI_eggRotBBox[4] =", "self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.eggRotBBox[0,self.intDivVal] = self.originalEggRotBBox self.eggBoxPoints[0,self.intDivVal]", "= roiChanges.getScale()[0] changeScaleY = roiChanges.getScale()[1] changeAngle = roiChanges.getAngle() # Update", "def updateROI(self): #global vidTime, xyPosHandles, ellipse, changeAngle, roiChanges,updatedEggROI, changeX, changeY,", "newitem) # Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) # Adjust size of Table", "seem very buggy. Shift to 1 and 89 as a", "self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal = int(self.divVal) self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.eggRotBBox[self.intDivVal,self.withinSeqVal]", "eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng))", "# ySorted = self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]), :] # # Get bottom", "-89 elif eggBBAng == -180: eggBBAng = -179 elif eggBBAng", "= QtGui.QTableWidget() self.diag.table.setShowGrid(True) self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted']) # Sets different alignment data", "to roi self.updatedEggROI=[] ### Still to do... 
self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #==============================================================================", "Make buttons self.cpROI_btn = QtGui.QPushButton('&Copy ROI') self.cpROI_btn.setMinimumHeight(40); self.useCpROI_btn = QtGui.QPushButton('&Use", "class eggUI(QDialog): ''' createOpenCVEggROI : take eggID defined ROIs and", "# (bl, br) = bottomMost # # Use bottom-left coordinate", "ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get bottom most, and", "# Make ROI - note non 0,or 90 degree angles,", ":] # Get bottom most, and top most sorted corner", "QtGui from PyQt5.QtCore import * from PyQt5.QtGui import * import", "eggRotBBox[vidTime][3]]) # Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1], [0.5,0.5]) self.roi.addScaleHandle([1,", "# #self.UI.comp(self.imImport(self.diag.imv.currentIndex())) # im = cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH) # ran = (im.max()-im.min())/255.", "eggUI(QDialog): ''' createOpenCVEggROI : take eggID defined ROIs and visualise", "created and displayed. self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo) #self.diag.keyPressEvent(self.keyPressEvent) #============================================================================== # Generate data for", "def importOpenCVROIs(self,eggRotBBox, eggBoxPoints): self.eggRotBBox = eggRotBBox self.eggBoxPoints = eggBoxPoints self.originalEggRotBBox", "= [np.nan,np.nan,np.nan,np.nan] self.eggRotBBox[0,self.intDivVal] = [np.nan, np.nan, np.nan, np.nan, np.nan] #==============================================================================", "embryo self.embryoFolders = glob.glob(parentPath + \"*/\" + embryo +\"/\") self.embryoFolders.sort(key=os.path.getctime)", "# ### Still to do... 
# self.diag.imv.addItem(self.roi) # self.roi.sigRegionChangeFinished.connect(self.updateROI) #==============================================================================", "the X size # Rectangular ROI used to enable more", "self.btnLayout.addWidget(self.exit_btn,2,1) # Add button layout to GridLayout. checkLayout.addLayout(self.btnLayout,0,5) # Format", "relevant video position and ROI self.getSeqValsAndCurrROI() # 0 or 90", "eggRotBBox.copy() # #self.originalEggBoxPoints = eggBoxPoints.copy() # #self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] #", "= self.originalEggBoxPoints self.updateOpenCVEggROICurrEmbryo() #============================================================================== # #============================================================================== #============================================================================== # Close button", "#============================================================================== # Generate a pyqtgraph ROI, using data from OpenCV.", "self.tableData = {'Embryo':list(self.embryoLabels), 'ROI approved':['No'] * len(list(self.embryoLabels))} self.tableCols = [QtGui.QColor(0,0,100,120)]*", "Update the user interface #============================================================================== def updateUI(self,ims,eggRotBBox, eggBoxPoints): self.imImport() self.diag.imv.setImage(self.compSeq)", "= out-out.min() self.compSeq[int(f)] = out.astype(np.uint8) self.compSeq[f] = self.compSeq[f].T #============================================================================== #", "# #self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] # #self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] # #", "__init__(self, parent=None): super(eggUI, self).__init__(parent) # Make QDialog self.diag = QtGui.QDialog()", "= pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) 
self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # roi =", "= cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH) # ran = (im.max()-im.min())/255. # out = (im/ran)", "= np.zeros_like(ims) for l in range(len(self.tSeqd)): self.tSeqd[l] = ims[l].T #==============================================================================", "updateOpenCVEggROICurrEmbryo # # Remove previous # self.diag.imv.removeItem(self.roi) # # Get", "out.astype(np.uint8) self.compSeq[f] = self.compSeq[f].T #============================================================================== # Update image iteratively when", "1) checkLayout.setRowStretch(1, 3) # Add to layout checkLayout.addWidget(self.diag.imv,0,0,2,2) checkLayout.addWidget(self.diag.table,1,5) #", "= (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle) #else: #self.updatedEggROI = (((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle) hh = self.roi.getHandles() hh", "self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] self.eggRotBBox[0,self.intDivVal] = [np.nan, np.nan, np.nan, np.nan, np.nan]", "previous if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get relevant video position", "for current frame if self.eggInt != 1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1]))", "ROI is approved. 
self.approveROI_btn.clicked.connect(self.updateTable) # Copy current ROI self.cpROI_btn.clicked.connect(self.cpROI) #", "= out.astype(np.uint8) self.compSeq[f] = self.compSeq[f].T #============================================================================== # Update image iteratively", "as anchor to calculate the Euclidean distance between the #", "update=True) # # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) #", "0.0): self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) # roi =", "# Make QDialog self.diag = QtGui.QDialog() global parentPath, vidTime self.diag.setWindowTitle('Identify", "self.noEgg_btn.clicked.connect(self.recordNoEgg) # Exit - prompt user to confirm #self.exit_btn.clicked.connect(self.closeEvent) #", "self.diag.imv.ui.roiBtn.hide() self.diag.imv.ui.menuBtn.hide() # Make tableview self.diag.table = QtGui.QTableWidget() self.diag.table.setShowGrid(True) self.diag.table.setHorizontalHeaderLabels(['Embryo',", "a starting point. #if len(self.updatedEggROI) == 0: self.updatedEggROI = (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle)", "for h in hh] # Handle on each corner. Get", "self.dataForTable() # Function for modifying the table when ROI is", "int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.eggRotBBox[self.intDivVal,self.withinSeqVal] = self.originalEggRotBBox self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = self.originalEggBoxPoints else:", "(self.currROI_eggRotBBox[4] == -0.0)| (self.currROI_eggRotBBox[4] == 0.0): self.roi = pg.ROI([bl[0], bl[1]],", "= QGridLayout() # Deal with stretching for approrpraite formatting. 
checkLayout.setColumnStretch(0,", "self.diag.imv.removeItem(self.roi) # Get relevant video position and ROI. self.getSeqValsAndCurrROI() #", "eggBBAng == -90: eggBBAng = -89 elif eggBBAng == -180:", "self.tSeqd[l] = ims[l].T #============================================================================== # Get folders for a particular", "= glob.glob(parentPath + \"*/\" + embryo +\"/\") self.embryoFolders.sort(key=os.path.getctime) #============================================================================== #", "correct ROI is created and displayed. self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo) #self.diag.keyPressEvent(self.keyPressEvent) #============================================================================== #", "self.originalEggRotBBox = eggRotBBox.copy() self.originalEggBoxPoints = eggBoxPoints.copy() #============================================================================== # Get index", "bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -0: #self.currROI_eggRotBBox[4] = -1", "dataHandling class #============================================================================== def formatSequence(self,ims): # Format seq appropriately for", "'True' # Make var for dealing with modifications to roi", "X size # Rectangular ROI used to enable more easy", "eggBBAng == -180: eggBBAng = -179 elif eggBBAng == -0:", "self.currROI_eggRotBBox self.originalEggBoxPoints = self.currROI_eggBoxPoints #============================================================================== # Assign nan to current", "Make var for dealing with modifications to roi self.updatedEggROI=[] ###", "# # Use bottom-left coordinate as anchor to calculate the", "int(self.divVal) self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] self.currROI_eggBoxPoints", "approved':['No'] * 
len(list(self.embryoLabels))} self.tableCols = [QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels)) # Enter data", "eggBoxPoints): self.imImport() self.diag.imv.setImage(self.compSeq) self.importOpenCVROIs(eggRotBBox, eggBoxPoints) self.getSeqValsAndCurrROI() self.updateOpenCVEggROINewEmbryo() # Add the", "images for pyqtgraph and put in ImageView # self.formatSequence(ims) self.imImport()", "(self.originalEggRotBBox[4] == -90.0) | (self.originalEggRotBBox[4] == -0.0)| (self.originalEggRotBBox[4] == 0.0):", "self.noEgg_btn.setMinimumHeight(40); self.approveROI_btn = QtGui.QPushButton('&Approve ROIs') self.approveROI_btn.setMinimumHeight(40); self.exit_btn = QtGui.QPushButton('Exit') self.exit_btn.setMinimumHeight(40);", "# bottomMost = ySorted[:2, :] # topMost = ySorted[2:, :]", "changeScaleX = roiChanges.getScale()[0] changeScaleY = roiChanges.getScale()[1] changeAngle = roiChanges.getAngle() #", "= QtCore.pyqtSignal() embryoUpdate = QtCore.pyqtSignal() keyPressed = QtCore.pyqtSignal() def __init__(self,", "self.useCpROI_btn = QtGui.QPushButton('&Use Copied ROI') self.useCpROI_btn.setMinimumHeight(40); self.noEgg_btn = QtGui.QPushButton('&No Egg')", "from dataHandling instance. Called from showUI and updateUI. #============================================================================== def", "Get handle positions self.xyPosHandles =[] for h in hh: self.xyPosHandles.append([h.x(),h.y()])", "= QtGui.QMessageBox.question(self, 'Message', # quit_msg, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) # # if", "changeScaleX, changeScaleY, changeAngle # Get changes to ROI scale, angle", "sequence.. 
if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [eggBBX, eggBBY, eggBBW,", "(tl, tr) = topMost[np.argsort(D)[::-1], :] self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2],", "enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) newitem.setBackground(QtGui.QColor(0,0,100,120)) self.diag.table.setItem(m, n, newitem) # Add", "* import sys import cv2 import pandas as pd from", "= pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -180: #self.currROI_eggRotBBox[4]", "ImageItem #self.diag.imv.addItem(self.roi) #============================================================================== # Deal with data from the dataHandling", "- note non 0,or 90 degree angles, require different of", ":] (bl, br) = bottomMost # Use bottom-left coordinate as", "self.roi.removable self.roi.invertible = 'True' # Make var for dealing with", "#============================================================================== def updateROI(self): #global vidTime, xyPosHandles, ellipse, changeAngle, roiChanges,updatedEggROI, changeX,", "D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] # (tl, tr) = topMost[np.argsort(D)[::-1],", "indices for current frame if self.eggInt != 1234: self.divVal =", "position roiChanges = self.roi.getGlobalTransform() changeX = -roiChanges.getTranslation()[0] changeY = roiChanges.getTranslation()[1]", "import os from PyQt5 import QtGui from PyQt5.QtCore import *", "#global vidTime, xyPosHandles, ellipse, changeAngle, roiChanges,updatedEggROI, changeX, changeY, changeScaleX, changeScaleY,", "= int(self.divVal) self.eggRotBBox[0,self.intDivVal] = self.originalEggRotBBox self.eggBoxPoints[0,self.intDivVal] = self.originalEggBoxPoints self.updateOpenCVEggROICurrEmbryo() #==============================================================================", "QtGui.QPushButton('&Approve ROIs') self.approveROI_btn.setMinimumHeight(40); self.exit_btn = 
QtGui.QPushButton('Exit') self.exit_btn.setMinimumHeight(40); # Make button", "self.getSeqValsAndCurrROI() if (self.currROI_eggRotBBox[0] != 'nan'): # 0 or 90 degree", "np.nan, np.nan, np.nan, np.nan] self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] else: self.eggBoxPoints[0,self.intDivVal] =", "pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Debug # print 'no angle'", "0]) self.roi.addScaleHandle([0, 0], [1, 1]) self.roi.setPen('y',width=3) self.roi.removable self.roi.invertible = 'True'", "parentPath self.embryo = embryo self.embryoFolders = glob.glob(parentPath + \"*/\" +", "use window x (topRight). # self.btnLayout.addWidget(self.exit_btn,2,1) # Add button layout", "createOpenCVEggROI : take eggID defined ROIs and visualise ''' sliderUpdate", "= [] for n, key in enumerate(sorted(self.tableData.keys())): horHeaders.append(key) for m,", "Make QDialog self.diag = QtGui.QDialog() global parentPath, vidTime self.diag.setWindowTitle('Identify eggs')", "= cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32) ) if eggBBAng == -90: eggBBAng =", "= topMost[np.argsort(D)[::-1], :] self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) else:", "[np.nan, np.nan, np.nan, np.nan, np.nan] self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] else: self.eggBoxPoints[0,self.intDivVal]", "if eggBBAng == -90: eggBBAng = -89 elif eggBBAng ==", ":] # (bl, br) = bottomMost # # Use bottom-left", "h.pos()) for h in hh] # Handle on each corner.", "item in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) newitem.setBackground(QtGui.QColor(0,0,100,120)) self.diag.table.setItem(m, n, newitem)", "pg #from PyQt4.Qt import * #%% class eggUI(QDialog): ''' createOpenCVEggROI", "def updateOpenCVEggROICurrEmbryo(self): # Remove previous if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) #", "ySorted[:2, :] # topMost = ySorted[2:, :] # 
# Get", "buggy. Shift to 1 and 89 as a bodge fix.", "ROI self.diag.imv.removeItem(self.roi) # Store nans in place of ROI if", "than one frame eggID per sequence.. if self.eggInt != 1234:", "the # # The point with the largest distance will", "self.originalEggBoxPoints else: self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.eggRotBBox[0,self.intDivVal] =", "roiChanges.getAngle() # Update ROI, either updating the previously updated or", "#============================================================================== # #self.originalEggRotBBox = eggRotBBox.copy() # #self.originalEggBoxPoints = eggBoxPoints.copy() #", "'no angle' else: # Random angle ROIs self.roi = pg.ROI([bottomMost[0][0],", "self.compSeq[f].T #============================================================================== # Update image iteratively when slider moved #==============================================================================", "points bottomMost = ySorted[:2, :] topMost = ySorted[2:, :] #", "self.eggBoxPoints[0,self.intDivVal] #============================================================================== # Generate a pyqtgraph ROI, using data from", "= -179 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get bottom", "= QtGui.QPushButton('&No Egg') self.noEgg_btn.setMinimumHeight(40); self.approveROI_btn = QtGui.QPushButton('&Approve ROIs') self.approveROI_btn.setMinimumHeight(40); self.exit_btn", "to layout checkLayout.addWidget(self.diag.imv,0,0,2,2) checkLayout.addWidget(self.diag.table,1,5) # Apply layout self.diag.setLayout(checkLayout) # Make", "#============================================================================== # Close button - not implemented (hidden) #============================================================================== #==============================================================================", "self.eggRotBBox[self.intDivVal,self.withinSeqVal] # #self.currROI_eggBoxPoints = 
self.eggBoxPoints[self.intDivVal,self.withinSeqVal] # # # Modified version", "dtype=np.int32) ) if eggBBAng == -90: eggBBAng = -89 elif", "self.roi.addScaleHandle([1, 1], [0, 0]) self.roi.addScaleHandle([0, 0], [1, 1]) self.roi.setPen('y',width=3) self.roi.removable", "#============================================================================== # Import OpenCV determined ROIs from dataHandling instance. Called", "table when approve ROI button clicked. #============================================================================== def updateTable(self): self.tableData['ROI", "-179 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get bottom most,", "cv2 import pandas as pd from PyQt5.Qt import * import", "[0, 0]) self.roi.addScaleHandle([0, 0], [1, 1]) self.roi.setPen('y',width=3) self.roi.removable self.roi.invertible =", "Add the ROI to ImageItem self.diag.show() # Call function to", "ROI to embryo sequence ROIs if self.eggInt != 1234: self.divVal", "ROIs # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # self.roi.setAngle(self.originalEggRotBBox[4],", "Get relevant video position and ROI self.getSeqValsAndCurrROI() # 0 or", "from pyqtgraph.Qt import QtCore, QtGui import numpy as np from", "# Make buttons self.cpROI_btn = QtGui.QPushButton('&Copy ROI') self.cpROI_btn.setMinimumHeight(40); self.useCpROI_btn =", "current ROI self.cpROI_btn.clicked.connect(self.cpROI) # Apply copied ROI self.useCpROI_btn.clicked.connect(self.applyCopiedROI) # Assign", "self.diag.imv.setImage(self.compSeq.T) # self.diag.imv.show() # #======== #============================================================================== #============================================================================== # ROI functions", "calculate the Euclidean distance between the # # The point", "{'Embryo':list(self.embryoLabels), 'ROI approved':['No'] * len(list(self.embryoLabels))} 
self.tableCols = [QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels)) #", "updateUI(self,ims,eggRotBBox, eggBoxPoints): self.imImport() self.diag.imv.setImage(self.compSeq) self.importOpenCVROIs(eggRotBBox, eggBoxPoints) self.getSeqValsAndCurrROI() self.updateOpenCVEggROINewEmbryo() # Add", "#============================================================================== #============================================================================== # Import OpenCV determined ROIs from dataHandling instance.", "self.getSeqValsAndCurrROI() # 0 or 90 degree angles seem very buggy.", "#from PyQt4.Qt import * #%% class eggUI(QDialog): ''' createOpenCVEggROI :", "-0.0)| (self.originalEggRotBBox[4] == 0.0): # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [self.originalEggRotBBox[2],", "eggRotBBox[vidTime][3]]) # # Add handles # self.roi.addRotateHandle([1, 0],[0.5,0.5]) # self.roi.addRotateHandle([0,", "imImport(self): for f in range(len(self.eggUIimPaths)): im = cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH) ran =", "Copied ROI') self.useCpROI_btn.setMinimumHeight(40); self.noEgg_btn = QtGui.QPushButton('&No Egg') self.noEgg_btn.setMinimumHeight(40); self.approveROI_btn =", "(eggBBW, eggBBH), eggBBAng)) #============================================================================== # Copy ROI on button click.", "self.tableCols[self.diag.table.currentRow()] = QtGui.QColor(0,100,0,120) horHeaders = [] for n, key in", "user chagnges. 
if (self.currROI_eggRotBBox[4] == -90.0) | (self.currROI_eggRotBBox[4] == -0.0)|", "#else: #self.updatedEggROI = (((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle) hh = self.roi.getHandles() hh = [self.roi.mapToItem(self.diag.imv.getImageItem(),", "# #============================================================================== def showUI(self,ims,eggRotBBox, eggBoxPoints, embryoLabels, eggInt): self.eggInt = eggInt", "= pg.ROI([bottomMost[0][0], bottomMost[0][1]], [self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # # roi = pg.EllipseROI([bottomMost[0][0],", "self.noEgg_btn = QtGui.QPushButton('&No Egg') self.noEgg_btn.setMinimumHeight(40); self.approveROI_btn = QtGui.QPushButton('&Approve ROIs') self.approveROI_btn.setMinimumHeight(40);", "#============================================================================== def getSeqValsAndCurrROI(self): # Calculate the indices for current frame", "# Handle on each corner. Get handle positions self.xyPosHandles =[]", "of corner handles for tracking user chagnges. if (self.currROI_eggRotBBox[4] ==", "#============================================================================== def updateTable(self): self.tableData['ROI approved'][self.diag.table.currentRow()] = 'Approved' self.tableCols[self.diag.table.currentRow()] = QtGui.QColor(0,100,0,120)", "self.roi.addRotateHandle([0, 1], [0.5,0.5]) # self.roi.addScaleHandle([1, 1], [0, 0]) # self.roi.addScaleHandle([0,", "# # Make var for dealing with modifications to roi", "self.importOpenCVROIs(eggRotBBox, eggBoxPoints) if (eggRotBBox[0][0][0] != 'nan'): self.createOpenCVEggROI() self.diag.imv.addItem(self.roi) # Remove", "1 and 89 as a bodge fix. 
if self.currROI_eggRotBBox[4] ==", "update=True) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Add", "be our bottom-right point # D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0]", "eggInt self.embryoLabels = embryoLabels self.diag.setWindowTitle('Identify eggs') # Make ImageView self.diag.imv", "modifications to roi self.updatedEggROI=[] ### Still to do... self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI)", "== -180: #self.currROI_eggRotBBox[4] = -179 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :]", "be our bottom-right point D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] (tl,", "# Update the user interface #============================================================================== def updateUI(self,ims,eggRotBBox, eggBoxPoints): self.imImport()", "box points ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get bottom", "for modifying the table when ROI is approved. self.approveROI_btn.clicked.connect(self.updateTable) #", "dealing with modifications to roi # self.updatedEggROI=[] # ### Still", "checkLayout.addWidget(self.diag.table,1,5) # Apply layout self.diag.setLayout(checkLayout) # Make buttons self.cpROI_btn =", "the ROI for current embryo. #============================================================================== def updateOpenCVEggROICurrEmbryo(self): # Remove", "range(len(self.tSeqd)): self.tSeqd[l] = ims[l].T #============================================================================== # Get folders for a", "degree angles seem very buggy. 
Shift to 1 and 89", "= self.eggBoxPoints[0,self.intDivVal] #============================================================================== # Generate a pyqtgraph ROI, using data", "(self.currROI_eggRotBBox[4] == 0.0): self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) #", "(tl, tr) = topMost[np.argsort(D)[::-1], :] # Make ROI - note", "== -0.0)| (self.originalEggRotBBox[4] == 0.0): # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]],", "# self.formatSequence(ims) self.imImport() self.diag.imv.setImage(self.compSeq) # Add the ROI to ImageItem", "for pyqtgraph and put in ImageView # self.formatSequence(ims) self.imImport() self.diag.imv.setImage(self.compSeq)", "# Update ROI for new embryo. #============================================================================== def updateOpenCVEggROINewEmbryo(self): #", "def recordNoEgg(self): # Remove ROI self.diag.imv.removeItem(self.roi) # Store nans in", "on button click. #============================================================================== def applyCopiedROI(self): self.getSeqValsAndCurrROI() # Store copied", "bottom most # bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :] # (bl,", "closeEvent(self, event): # # quit_msg = \"Are you sure you", "The point with the largest distance will be our bottom-right", "Make var for dealing with modifications to roi # self.updatedEggROI=[]", "= eggBoxPoints.copy() # #self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] # #self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal]", "# Make var for dealing with modifications to roi #", "between the # The point with the largest distance will", "(topRight). # self.btnLayout.addWidget(self.exit_btn,2,1) # Add button layout to GridLayout. 
checkLayout.addLayout(self.btnLayout,0,5)", "[np.nan,np.nan,np.nan,np.nan] else: self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] self.eggRotBBox[0,self.intDivVal] = [np.nan, np.nan, np.nan,", "-180: eggBBAng = -179 elif eggBBAng == -0: eggBBAng =", "largest distance will be our bottom-right point D = dist.cdist(bl[np.newaxis],", ":] # # Make ROI - note non 0,or 90", "and displayed. self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo) #self.diag.keyPressEvent(self.keyPressEvent) #============================================================================== # Generate data for populating", "ROI, either updating the previously updated or taking the unaltered", "you sure you want to exit the program?\" # reply", "# self.getSeqValsAndCurrROI() # #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] = self.seq # #self.UI.comp(self.imImport(self.diag.imv.currentIndex())) # im", "self.tableData['ROI approved'][self.diag.table.currentRow()] = 'Approved' self.tableCols[self.diag.table.currentRow()] = QtGui.QColor(0,100,0,120) horHeaders = []", "cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) #============================================================================== # Copy ROI on", "distance will be our bottom-right point D = dist.cdist(bl[np.newaxis], topMost,", "updateROI(self): #global vidTime, xyPosHandles, ellipse, changeAngle, roiChanges,updatedEggROI, changeX, changeY, changeScaleX,", "def updateUI(self,ims,eggRotBBox, eggBoxPoints): self.imImport() self.diag.imv.setImage(self.compSeq) self.importOpenCVROIs(eggRotBBox, eggBoxPoints) self.getSeqValsAndCurrROI() self.updateOpenCVEggROINewEmbryo() #", "functions #============================================================================== #============================================================================== # Import OpenCV determined ROIs from dataHandling", "corner handles for tracking user chagnges. 
# if (self.originalEggRotBBox[4] ==", "topMost = ySorted[2:, :] # # Get bottom most #", "bottom most, and top most sorted corner points # bottomMost", "out = (im/ran) out = out-out.min() self.compSeq[int(f)] = out.astype(np.uint8) self.compSeq[f]", "bounding box points # ySorted = self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]), :] #", "vidTime self.diag.setWindowTitle('Identify eggs') self.diag.imv = pg.ImageView() self.btn_save = QPushButton('Save', self)", "roi self.updatedEggROI=[] ### Still to do... self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== #", "# Add handles # self.roi.addRotateHandle([1, 0],[0.5,0.5]) # self.roi.addRotateHandle([0, 1], [0.5,0.5])", "ROI button clicked. #============================================================================== def updateTable(self): self.tableData['ROI approved'][self.diag.table.currentRow()] = 'Approved'", "self.seq # #self.UI.comp(self.imImport(self.diag.imv.currentIndex())) # im = cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH) # ran =", "1], [0.5,0.5]) self.roi.addScaleHandle([1, 1], [0, 0]) self.roi.addScaleHandle([0, 0], [1, 1])", "hh = [self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos()) for h in hh] # Handle", "points # bottomMost = ySorted[:2, :] # topMost = ySorted[2:,", "in timeline so correct ROI is created and displayed. self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo)", "Table self.diag.table.resizeRowsToContents() #============================================================================== # Update the user interface #============================================================================== def", "= embryo self.embryoFolders = glob.glob(parentPath + \"*/\" + embryo +\"/\")", "roi # self.updatedEggROI=[] # ### Still to do... 
# self.diag.imv.addItem(self.roi)", "#============================================================================== # Get index values for ROI data. #============================================================================== def", "if 'No Egg' button clicked #============================================================================== def recordNoEgg(self): # Remove", "= ims[l].T #============================================================================== # Get folders for a particular embryo", "table when ROI is approved. self.approveROI_btn.clicked.connect(self.updateTable) # Copy current ROI", "0]) # self.roi.addScaleHandle([0, 0], [1, 1]) # self.roi.setPen('y',width=3) # self.roi.removable", "Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1], [0.5,0.5]) self.roi.addScaleHandle([1, 1], [0,", "embryo/approveROI table. #============================================================================== def dataForTable(self): self.tableData = {'Embryo':list(self.embryoLabels), 'ROI approved':['No']", "# # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # else:", ":] # # Get bottom most # bottomMost = bottomMost[np.argsort(bottomMost[:,", "1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal = int(self.divVal) self.withinSeqVal = int((self.divVal", "angles seem very buggy. Shift to 1 and 89 as", "embryoLabels self.diag.setWindowTitle('Identify eggs') # Make ImageView self.diag.imv = pg.ImageView() self.diag.resize(1000,600)", "in enumerate(sorted(self.tableData.keys())): horHeaders.append(key) for m, item in enumerate(self.tableData[key]): newitem =", "the largest distance will be our bottom-right point # D", "and 89 as a bodge fix. 
if self.currROI_eggRotBBox[4] == -90:", "and top most sorted corner points # bottomMost = ySorted[:2,", "version of updateOpenCVEggROICurrEmbryo # # Remove previous # self.diag.imv.removeItem(self.roi) #", "#============================================================================== # Update image iteratively when slider moved #============================================================================== #==============================================================================", "out = (im/ran) # out = out-out.min() # self.compSeq[self.diag.imv.currentIndex] =", "most bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :] (bl, br) = bottomMost", "Modified version of updateOpenCVEggROICurrEmbryo # # Remove previous # self.diag.imv.removeItem(self.roi)", "showUI and updateUI. #============================================================================== def importOpenCVROIs(self,eggRotBBox, eggBoxPoints): self.eggRotBBox = eggRotBBox", "checkLayout.addLayout(self.btnLayout,0,5) # Format images for pyqtgraph and put in ImageView", "self.cpROI_btn = QtGui.QPushButton('&Copy ROI') self.cpROI_btn.setMinimumHeight(40); self.useCpROI_btn = QtGui.QPushButton('&Use Copied ROI')", "# self.diag.imv.removeItem(self.roi) # # Get relevant video position and ROI.", "stretching for approrpraite formatting. 
checkLayout.setColumnStretch(0, 3) checkLayout.setColumnStretch(1, 1) checkLayout.setRowStretch(0, 1)", "eggs') self.diag.imv = pg.ImageView() self.btn_save = QPushButton('Save', self) #============================================================================== #", "rotated bounding box points ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] #", "self.parentPath = parentPath self.embryo = embryo self.embryoFolders = glob.glob(parentPath +", "containing egg self.noEgg_btn.clicked.connect(self.recordNoEgg) # Exit - prompt user to confirm", "either updating the previously updated or taking the unaltered ROI", "np.nan, np.nan] self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] else: self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] self.eggRotBBox[0,self.intDivVal]", "Import OpenCV determined ROIs from dataHandling instance. Called from showUI", "[] for n, key in enumerate(sorted(self.tableData.keys())): horHeaders.append(key) for m, item", "horHeaders.append(key) for m, item in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) self.diag.table.setItem(m,", "Save updated # If more than one frame eggID per", "=[] for h in hh: self.xyPosHandles.append([h.x(),h.y()]) (eggBBX, eggBBY), (eggBBW, eggBBH),", "bodge fix. 
if self.currROI_eggRotBBox[4] == -90: #self.currROI_eggRotBBox[4] = -89 #", "-roiChanges.getTranslation()[0] changeY = roiChanges.getTranslation()[1] changeScaleX = roiChanges.getScale()[0] changeScaleY = roiChanges.getScale()[1]", "angle ROIs # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) #", "import * from PyQt5.QtGui import * import sys import cv2", "Make ImageView self.diag.imv = pg.ImageView() self.diag.resize(1000,600) # Make ROI self.importOpenCVROIs(eggRotBBox,", "Get changes to ROI scale, angle and position roiChanges =", "ImageItem self.diag.show() # Call function to add data self.dataForTable() #", "bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]])", "ROI data. #============================================================================== def getSeqValsAndCurrROI(self): # Calculate the indices for", "more than one frame eggID per sequence.. 
if self.eggInt !=", "self.originalEggRotBBox[3]]) # # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) #", "different of the X size # # Rectangular ROI used", "data just on the first column self.diag.table.setRowCount(int(len(self.embryoLabels))) self.diag.table.setColumnCount(2) # Highlight", "# Get bottom most, and top most sorted corner points", "corner points bottomMost = ySorted[:2, :] topMost = ySorted[2:, :]", "self.importOpenCVROIs(eggRotBBox, eggBoxPoints) self.getSeqValsAndCurrROI() self.updateOpenCVEggROINewEmbryo() # Add the ROI to ImageItem", "# Copy current ROI self.cpROI_btn.clicked.connect(self.cpROI) # Apply copied ROI self.useCpROI_btn.clicked.connect(self.applyCopiedROI)", "folders for a particular embryo #============================================================================== def getEmbryoFolders(self, parentPath, embryo):", "1]), :] # # Get bottom most, and top most", "h in hh: self.xyPosHandles.append([h.x(),h.y()]) (eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng =", "= pg.ImageView() self.diag.resize(1000,600) # Make ROI self.importOpenCVROIs(eggRotBBox, eggBoxPoints) if (eggRotBBox[0][0][0]", "= self.roi.getGlobalTransform() changeX = -roiChanges.getTranslation()[0] changeY = roiChanges.getTranslation()[1] changeScaleX =", "tr) = topMost[np.argsort(D)[::-1], :] # # Make ROI - note", "embryo. 
#============================================================================== def updateOpenCVEggROINewEmbryo(self): # Remove old ROI if (hasattr(self,", "# # Get bottom most, and top most sorted corner", "# # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # #", "self.roi.addScaleHandle([0, 0], [1, 1]) self.roi.setPen('y',width=3) self.roi.removable self.roi.invertible = 'True' #", "self.eggBoxPoints = eggBoxPoints self.originalEggRotBBox = eggRotBBox.copy() self.originalEggBoxPoints = eggBoxPoints.copy() #==============================================================================", "copied ROI self.useCpROI_btn.clicked.connect(self.applyCopiedROI) # Assign nan to frames not containing", "you want to exit the program?\" # reply = QtGui.QMessageBox.question(self,", "ROIs self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) #", "layout self.diag.setLayout(checkLayout) # Make buttons self.cpROI_btn = QtGui.QPushButton('&Copy ROI') self.cpROI_btn.setMinimumHeight(40);", "def imImport(self): for f in range(len(self.eggUIimPaths)): im = cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH) ran", "determined ROIs from dataHandling instance. 
Called from showUI and updateUI.", "= (((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle) hh = self.roi.getHandles() hh = [self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos()) for", "== -0: #self.currROI_eggRotBBox[4] = -1 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :]", "onto Table horHeaders = [] for n, key in enumerate(sorted(self.tableData.keys())):", "the Euclidean distance between the # # The point with", "0],[0.5,0.5]) self.roi.addRotateHandle([0, 1], [0.5,0.5]) self.roi.addScaleHandle([1, 1], [0, 0]) self.roi.addScaleHandle([0, 0],", "# Get relevant video position and ROI. # self.getSeqValsAndCurrROI() #", "using data from OpenCV. #============================================================================== def createOpenCVEggROI(self): # Get relevant", "self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1], [0.5,0.5]) self.roi.addScaleHandle([1, 1], [0, 0]) self.roi.addScaleHandle([0,", "if (self.currROI_eggRotBBox[4] == -90.0) | (self.currROI_eggRotBBox[4] == -0.0)| (self.currROI_eggRotBBox[4] ==", "bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -180: #self.currROI_eggRotBBox[4] = -179", "[eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Debug # print 'no angle' else: #", "# Assign nan to frames not containing egg self.noEgg_btn.clicked.connect(self.recordNoEgg) #", "clicked #============================================================================== def recordNoEgg(self): # Remove ROI self.diag.imv.removeItem(self.roi) # Store", "# # Make ROI - note non 0,or 90 degree", "self.embryoLabels = embryoLabels self.diag.setWindowTitle('Identify eggs') # Make ImageView self.diag.imv =", 
"self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] else: self.divVal =", "handles # self.roi.addRotateHandle([1, 0],[0.5,0.5]) # self.roi.addRotateHandle([0, 1], [0.5,0.5]) # self.roi.addScaleHandle([1,", "roiChanges.getScale()[0] changeScaleY = roiChanges.getScale()[1] changeAngle = roiChanges.getAngle() # Update ROI,", "dealing with modifications to roi self.updatedEggROI=[] self.roi.sigRegionChangeFinished.connect(self.updateROI) #else: #============================================================================== #", "== -180: eggBBAng = -179 elif eggBBAng == -0: eggBBAng", "# Get relevant sequence position and ROI. self.getSeqValsAndCurrROI() if (self.currROI_eggRotBBox[0]", "# Format seq appropriately for pyqtgraph ROIs self.tSeqd = np.zeros_like(ims)", "= 'True' # # Make var for dealing with modifications", "frames not containing egg self.noEgg_btn.clicked.connect(self.recordNoEgg) # Exit - prompt user", "bottom-right point # D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] # (tl,", "-89 # Get rotated bounding box points ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:,", "self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI. #============================================================================== def updateROI(self): #global", "handling of corner handles for tracking user chagnges. 
# if", "pyqtgraph and put in ImageView # self.formatSequence(ims) self.imImport() self.diag.imv.setImage(self.compSeq) #", "= self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.currROI_eggRotBBox = self.eggRotBBox[0,self.intDivVal] self.currROI_eggBoxPoints =", "np from scipy.spatial import distance as dist import glob import", "out-out.min() self.compSeq[int(f)] = out.astype(np.uint8) self.compSeq[f] = self.compSeq[f].T #============================================================================== # Update", "for populating the embryo/approveROI table. #============================================================================== def dataForTable(self): self.tableData =", "first row self.diag.table.selectRow(0) # Make layout checkLayout = QGridLayout() #", "confirm #self.exit_btn.clicked.connect(self.closeEvent) # Connect changes in timeline so correct ROI", "# # Random angle ROIs # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]],", "changeX = -roiChanges.getTranslation()[0] changeY = roiChanges.getTranslation()[1] changeScaleX = roiChanges.getScale()[0] changeScaleY", "[self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) #", "glob.glob(parentPath + \"*/\" + embryo +\"/\") self.embryoFolders.sort(key=os.path.getctime) #============================================================================== # Get", "# Function for modifying the table when ROI is approved.", "(eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng = cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32) ) if", "self.xyPosHandles.append([h.x(),h.y()]) (eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng = cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32) )", "+\"/\") self.embryoFolders.sort(key=os.path.getctime) #============================================================================== # Get image 
#============================================================================== def imImport(self): for", "= -89 elif eggBBAng == -180: eggBBAng = -179 elif", "pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2],", "corner points # bottomMost = ySorted[:2, :] # topMost =", "ROI self.useCpROI_btn.clicked.connect(self.applyCopiedROI) # Assign nan to frames not containing egg", "visualise ''' sliderUpdate = QtCore.pyqtSignal() embryoUpdate = QtCore.pyqtSignal() keyPressed =", "self.originalEggBoxPoints = eggBoxPoints.copy() #============================================================================== # Get index values for ROI", "self.diag.table.setHorizontalHeaderLabels(horHeaders) # Adjust size of Table self.diag.table.resizeRowsToContents() # self.diag.table.resizeColumnsToContents() #==============================================================================", "eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[0,self.intDivVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH),", "to 1 and 89 as a bodge fix. 
if self.currROI_eggRotBBox[4]", "row self.diag.table.selectRow(0) # Make layout checkLayout = QGridLayout() # Deal", "degree angles, require different of the X size # Rectangular", "= [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = cv2.boxPoints(((eggBBX, eggBBY),", "button - not implemented (hidden) #============================================================================== #============================================================================== # def closeEvent(self,", ":] # # Get bottom most, and top most sorted", "= QtGui.QTableWidgetItem(item) newitem.setBackground(QtGui.QColor(0,0,100,120)) self.diag.table.setItem(m, n, newitem) # Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders)", "most, and top most sorted corner points bottomMost = ySorted[:2,", "Sets different alignment data just on the first column self.diag.table.setRowCount(int(len(self.embryoLabels)))", "elif eggBBAng == -180: eggBBAng = -179 elif eggBBAng ==", "QtGui.QTableWidgetItem(item) self.diag.table.setItem(m, n, newitem) newitem.setBackground(self.tableCols[m]) #Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) #Adjust size", "point with the largest distance will be our bottom-right point", "= self.originalEggRotBBox self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = self.originalEggBoxPoints else: self.divVal = self.diag.imv.currentIndex self.intDivVal", "calculate the Euclidean distance between the # The point with", "ROI. 
#============================================================================== def updateROI(self): #global vidTime, xyPosHandles, ellipse, changeAngle, roiChanges,updatedEggROI,", "scipy.spatial import distance as dist import glob import re import", "= self.originalEggRotBBox self.eggBoxPoints[0,self.intDivVal] = self.originalEggBoxPoints self.updateOpenCVEggROICurrEmbryo() #============================================================================== # #============================================================================== #==============================================================================", "roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Add handles self.roi.addRotateHandle([1,", "very buggy. Shift to 1 and 89 as a bodge", "position and ROI. self.getSeqValsAndCurrROI() # 0 or 90 degree angles", "Get bottom most # bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :] #", "PyQt5.QtCore import * from PyQt5.QtGui import * import sys import", "ROI') self.useCpROI_btn.setMinimumHeight(40); self.noEgg_btn = QtGui.QPushButton('&No Egg') self.noEgg_btn.setMinimumHeight(40); self.approveROI_btn = QtGui.QPushButton('&Approve", "self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = cv2.boxPoints(((eggBBX,", "(self.currROI_eggRotBBox[0] != 'nan'): # 0 or 90 degree angles seem", "a bodge fix. 
if self.currROI_eggRotBBox[4] == -90: #self.currROI_eggRotBBox[4] = -89", "= pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # self.roi.setAngle(self.originalEggRotBBox[4], update=True) # #", "self.diag.imv.addItem(self.roi) # Remove buttons from ImageView widget self.diag.imv.ui.roiBtn.hide() self.diag.imv.ui.menuBtn.hide() #", "cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32) ) if eggBBAng == -90: eggBBAng = -89", "roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # # Add handles", "# Exit button not implemented, just use window x (topRight).", "-0: eggBBAng = -1 # Save updated # If more", "user interface #============================================================================== def updateUI(self,ims,eggRotBBox, eggBoxPoints): self.imImport() self.diag.imv.setImage(self.compSeq) self.importOpenCVROIs(eggRotBBox, eggBoxPoints)", "pandas as pd from PyQt5.Qt import * import pyqtgraph as", "hh = self.roi.getHandles() hh = [self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos()) for h in", "= [self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos()) for h in hh] # Handle on", "[self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) else: # Get rotated bounding box points ySorted", "Get bottom most, and top most sorted corner points bottomMost", "on each corner. Get handle positions self.xyPosHandles =[] for h", "handles for tracking user chagnges. # if (self.originalEggRotBBox[4] == -90.0)", "data from OpenCV. #============================================================================== def createOpenCVEggROI(self): # Get relevant sequence", "# # Get bottom most # bottomMost = bottomMost[np.argsort(bottomMost[:, 1]),", "is approved. 
self.approveROI_btn.clicked.connect(self.updateTable) # Copy current ROI self.cpROI_btn.clicked.connect(self.cpROI) # Apply", "embryoLabels, eggInt): self.eggInt = eggInt self.embryoLabels = embryoLabels self.diag.setWindowTitle('Identify eggs')", "### Still to do... self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI.", "pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # # Add handles # self.roi.addRotateHandle([1,", "def __init__(self, parent=None): super(eggUI, self).__init__(parent) # Make QDialog self.diag =", "Adjust size of Table self.diag.table.resizeRowsToContents() # self.diag.table.resizeColumnsToContents() #============================================================================== # Update", "re import os from PyQt5 import QtGui from PyQt5.QtCore import", "def getEmbryoFolders(self, parentPath, embryo): self.parentPath = parentPath self.embryo = embryo", "0.0): # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # #", "# Remove previous # self.diag.imv.removeItem(self.roi) # # Get relevant video", "roiChanges.getTranslation()[1] changeScaleX = roiChanges.getScale()[0] changeScaleY = roiChanges.getScale()[1] changeAngle = roiChanges.getAngle()", "= pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # else: # # Random", "GridLayout. 
checkLayout.addLayout(self.btnLayout,0,5) # Format images for pyqtgraph and put in", "distance between the # # The point with the largest", "# #======== #============================================================================== #============================================================================== # ROI functions #============================================================================== #============================================================================== #", "handling of corner handles for tracking user chagnges. if (self.currROI_eggRotBBox[4]", "# Debug # print 'no angle' else: # Random angle", "for m, item in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) newitem.setBackground(QtGui.QColor(0,0,100,120)) self.diag.table.setItem(m,", "self.eggRotBBox[0,self.intDivVal] = self.originalEggRotBBox self.eggBoxPoints[0,self.intDivVal] = self.originalEggBoxPoints self.updateOpenCVEggROICurrEmbryo() #============================================================================== # #==============================================================================", "implemented, just use window x (topRight). # self.btnLayout.addWidget(self.exit_btn,2,1) # Add", "= [np.nan,np.nan,np.nan,np.nan] else: self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] self.eggRotBBox[0,self.intDivVal] = [np.nan, np.nan,", "90 degree angles seem very buggy. Shift to 1 and", "OpenCV determined ROIs from dataHandling instance. 
Called from showUI and", "= cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) # Otherwise just save", "formatSequence(self,ims): # Format seq appropriately for pyqtgraph ROIs self.tSeqd =", "QtGui.QTableWidget() self.diag.table.setShowGrid(True) self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted']) # Sets different alignment data just", "updated or taking the unaltered ROI from OpenCV as a", "if (eggRotBBox[0][0][0] != 'nan'): self.createOpenCVEggROI() self.diag.imv.addItem(self.roi) # Remove buttons from", "if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [eggBBX, eggBBY, eggBBW, eggBBH,", "eggBoxPoints) self.getSeqValsAndCurrROI() self.updateOpenCVEggROINewEmbryo() # Add the ROI to ImageItem #self.diag.imv.addItem(self.roi)", "# else: # event.ignore() # #============================================================================== #============================================================================== # #self.originalEggRotBBox =", "self.updatedEggROI=[] # ### Still to do... # self.diag.imv.addItem(self.roi) # self.roi.sigRegionChangeFinished.connect(self.updateROI)", "ImageView self.diag.imv = pg.ImageView() self.diag.resize(1000,600) # Make ROI self.importOpenCVROIs(eggRotBBox, eggBoxPoints)", "QDialog self.diag = QtGui.QDialog() global parentPath, vidTime self.diag.setWindowTitle('Identify eggs') self.diag.imv", "if (self.currROI_eggRotBBox[0] != 'nan'): # 0 or 90 degree angles", "ran = (im.max()-im.min())/255. out = (im/ran) out = out-out.min() self.compSeq[int(f)]", "''' sliderUpdate = QtCore.pyqtSignal() embryoUpdate = QtCore.pyqtSignal() keyPressed = QtCore.pyqtSignal()", "# # Remove previous # self.diag.imv.removeItem(self.roi) # # Get relevant", "'roi')): self.diag.imv.removeItem(self.roi) # Get relevant video position and ROI self.getSeqValsAndCurrROI()", "do... 
self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI for new embryo.", "+ embryo +\"/\") self.embryoFolders.sort(key=os.path.getctime) #============================================================================== # Get image #============================================================================== def", "video position and ROI. self.getSeqValsAndCurrROI() # 0 or 90 degree", "Copy ROI on button click. #============================================================================== def cpROI(self): self.originalEggRotBBox =", "If more than one frame eggID per sequence.. if self.eggInt", "self.createOpenCVEggROI() self.diag.imv.addItem(self.roi) # Remove buttons from ImageView widget self.diag.imv.ui.roiBtn.hide() self.diag.imv.ui.menuBtn.hide()", "previous # self.diag.imv.removeItem(self.roi) # # Get relevant video position and", "self.currROI_eggRotBBox[3]]) else: # Get rotated bounding box points ySorted =", "# # Modified version of updateOpenCVEggROICurrEmbryo # # Remove previous", "item in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) self.diag.table.setItem(m, n, newitem) newitem.setBackground(self.tableCols[m])", "pyqtgraph ROIs self.tSeqd = np.zeros_like(ims) for l in range(len(self.tSeqd)): self.tSeqd[l]", "# # Get rotated bounding box points # ySorted =", "self.updateOpenCVEggROINewEmbryo() # Add the ROI to ImageItem #self.diag.imv.addItem(self.roi) #============================================================================== #", "positions self.xyPosHandles =[] for h in hh: self.xyPosHandles.append([h.x(),h.y()]) (eggBBX, eggBBY),", "bottomMost[np.argsort(bottomMost[:, 1]), :] (bl, br) = bottomMost # Use bottom-left", "embryoUpdate = QtCore.pyqtSignal() keyPressed = QtCore.pyqtSignal() def __init__(self, parent=None): super(eggUI,", "int(self.divVal) self.eggRotBBox[0,self.intDivVal] = 
self.originalEggRotBBox self.eggBoxPoints[0,self.intDivVal] = self.originalEggBoxPoints self.updateOpenCVEggROICurrEmbryo() #============================================================================== #", "self.diag.imv.setImage(self.compSeq) # Add the ROI to ImageItem self.diag.show() # Call", "#============================================================================== def showUI(self,ims,eggRotBBox, eggBoxPoints, embryoLabels, eggInt): self.eggInt = eggInt self.embryoLabels", "eggBBY), (eggBBW, eggBBH), eggBBAng)) #============================================================================== # Copy ROI on button", "data onto Table horHeaders = [] for n, key in", "np.nan, np.nan, np.nan] #============================================================================== # Copy ROI on button click.", "with data from the dataHandling class #============================================================================== def formatSequence(self,ims): #", "the dataHandling class #============================================================================== def formatSequence(self,ims): # Format seq appropriately", "of the X size # # Rectangular ROI used to", "self).__init__(parent) # Make QDialog self.diag = QtGui.QDialog() global parentPath, vidTime", "self.diag.imv.ui.menuBtn.hide() # Make tableview self.diag.table = QtGui.QTableWidget() self.diag.table.setShowGrid(True) self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted'])", "Egg' button clicked #============================================================================== def recordNoEgg(self): # Remove ROI self.diag.imv.removeItem(self.roi)", "= out.astype(np.uint8) # self.diag.imv.setImage(self.compSeq.T) # self.diag.imv.show() # #======== #============================================================================== #==============================================================================", "approved'][self.diag.table.currentRow()] = 'Approved' self.tableCols[self.diag.table.currentRow()] = 
QtGui.QColor(0,100,0,120) horHeaders = [] for", "add data self.dataForTable() # Function for modifying the table when", "data from the dataHandling class #============================================================================== def formatSequence(self,ims): # Format", "quit_msg = \"Are you sure you want to exit the", "import numpy as np from scipy.spatial import distance as dist", "Deal with stretching for approrpraite formatting. checkLayout.setColumnStretch(0, 3) checkLayout.setColumnStretch(1, 1)", "[QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels)) # Enter data onto Table horHeaders = []", "# # Add handles # self.roi.addRotateHandle([1, 0],[0.5,0.5]) # self.roi.addRotateHandle([0, 1],", "self.eggBoxPoints[0,self.intDivVal] = self.originalEggBoxPoints self.updateOpenCVEggROICurrEmbryo() #============================================================================== # #============================================================================== #============================================================================== # Close", "# # # Modified version of updateOpenCVEggROICurrEmbryo # # Remove", "layout self.btnLayout = QGridLayout() self.btnLayout.addWidget(self.cpROI_btn,0,0) self.btnLayout.addWidget(self.useCpROI_btn,0,1) self.btnLayout.addWidget(self.noEgg_btn,1,1) self.btnLayout.addWidget(self.approveROI_btn,1,0) # Exit", "[-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # # Add handles # self.roi.addRotateHandle([1, 0],[0.5,0.5]) #", "ims[l].T #============================================================================== # Get folders for a particular embryo #==============================================================================", "!= 'nan'): # 0 or 90 degree angles seem very", "ImageView # self.formatSequence(ims) self.imImport() self.diag.imv.setImage(self.compSeq) # Add the ROI to", "changeY = roiChanges.getTranslation()[1] changeScaleX = roiChanges.getScale()[0] changeScaleY = roiChanges.getScale()[1] changeAngle", 
"bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) else: # Get rotated bounding box points", "modifying the table when ROI is approved. self.approveROI_btn.clicked.connect(self.updateTable) # Copy", "# Get bottom most # bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :]", "# Remove buttons from ImageView widget self.diag.imv.ui.roiBtn.hide() self.diag.imv.ui.menuBtn.hide() # Make", "# self.roi.setPen('y',width=3) # self.roi.removable # self.roi.invertible = 'True' # #", "to enable more easy handling of corner handles for tracking", "if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [np.nan, np.nan, np.nan, np.nan,", "of corner handles for tracking user chagnges. # if (self.originalEggRotBBox[4]", "ROI if 'No Egg' button clicked #============================================================================== def recordNoEgg(self): #", "Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) # Adjust size of Table self.diag.table.resizeRowsToContents() #", "most sorted corner points bottomMost = ySorted[:2, :] topMost =", "bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Debug # print 'no angle' else:", "newitem.setBackground(QtGui.QColor(0,0,100,120)) self.diag.table.setItem(m, n, newitem) # Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) # Adjust", "Update image iteratively when slider moved #============================================================================== #============================================================================== # def", "1]) self.roi.setPen('y',width=3) self.roi.removable self.roi.invertible = 'True' # Make var for", "as dist import glob import re import os from PyQt5", "self.embryoFolders = glob.glob(parentPath + \"*/\" + embryo +\"/\") self.embryoFolders.sort(key=os.path.getctime) #==============================================================================", "def formatSequence(self,ims): # Format seq appropriately for pyqtgraph 
ROIs self.tSeqd", "require different of the X size # Rectangular ROI used", "# # Rectangular ROI used to enable more easy handling", "= eggInt self.embryoLabels = embryoLabels self.diag.setWindowTitle('Identify eggs') # Make ImageView", "ran = (im.max()-im.min())/255. # out = (im/ran) # out =", "roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Debug # print", "updateOpenCVEggROINewEmbryo(self): # Remove old ROI if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) #", "eggBBY), (eggBBW, eggBBH), eggBBAng = cv2.minAreaRect(np.array(self.xyPosHandles, dtype=np.int32) ) if eggBBAng", "angle and position roiChanges = self.roi.getGlobalTransform() changeX = -roiChanges.getTranslation()[0] changeY", "QtGui.QTableWidgetItem(item) newitem.setBackground(QtGui.QColor(0,0,100,120)) self.diag.table.setItem(m, n, newitem) # Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) #", "# Modified version of updateOpenCVEggROICurrEmbryo # # Remove previous #", "else: # Get rotated bounding box points ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:,", "self.xyPosHandles =[] for h in hh: self.xyPosHandles.append([h.x(),h.y()]) (eggBBX, eggBBY), (eggBBW,", "= self.currROI_eggBoxPoints #============================================================================== # Assign nan to current ROI if", "to ROI scale, angle and position roiChanges = self.roi.getGlobalTransform() changeX", "bottom-left coordinate as anchor to calculate the Euclidean distance between", "changeScaleY, changeAngle # Get changes to ROI scale, angle and", "Make tableview self.diag.table = QtGui.QTableWidget() self.diag.table.setShowGrid(True) self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted']) # Sets", "= int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.eggRotBBox[self.intDivVal,self.withinSeqVal] = self.originalEggRotBBox 
self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = self.originalEggBoxPoints", "glob import re import os from PyQt5 import QtGui from", "eggRotBBox self.eggBoxPoints = eggBoxPoints self.originalEggRotBBox = eggRotBBox.copy() self.originalEggBoxPoints = eggBoxPoints.copy()", "# roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # else: #", "roiChanges.getScale()[1] changeAngle = roiChanges.getAngle() # Update ROI, either updating the", "self.diag.setWindowTitle('Identify eggs') # Make ImageView self.diag.imv = pg.ImageView() self.diag.resize(1000,600) #", "for dealing with modifications to roi # self.updatedEggROI=[] # ###", "to exit the program?\" # reply = QtGui.QMessageBox.question(self, 'Message', #", "easy handling of corner handles for tracking user chagnges. if", "ROI to ImageItem #self.diag.imv.addItem(self.roi) #============================================================================== # Deal with data from", "PyQt5 import QtGui from PyQt5.QtCore import * from PyQt5.QtGui import", "the user interface #============================================================================== def updateUI(self,ims,eggRotBBox, eggBoxPoints): self.imImport() self.diag.imv.setImage(self.compSeq) self.importOpenCVROIs(eggRotBBox,", "import * #%% class eggUI(QDialog): ''' createOpenCVEggROI : take eggID", "as np from scipy.spatial import distance as dist import glob", "self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]])", "# quit_msg, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) # # if reply == QtGui.QMessageBox.Yes:", "Add handles # self.roi.addRotateHandle([1, 0],[0.5,0.5]) # self.roi.addRotateHandle([0, 1], [0.5,0.5]) #", "quit_msg, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) # # if reply == QtGui.QMessageBox.Yes: #", "bottomMost = 
bottomMost[np.argsort(bottomMost[:, 1]), :] (bl, br) = bottomMost #", "bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5])", "#self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] # #self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] # # #", "layout checkLayout = QGridLayout() # Deal with stretching for approrpraite", "changeAngle # Get changes to ROI scale, angle and position", "with modifications to roi self.updatedEggROI=[] self.roi.sigRegionChangeFinished.connect(self.updateROI) #else: #============================================================================== # Update", "# def closeEvent(self, event): # # quit_msg = \"Are you", "sure you want to exit the program?\" # reply =", "= QtGui.QPushButton('&Use Copied ROI') self.useCpROI_btn.setMinimumHeight(40); self.noEgg_btn = QtGui.QPushButton('&No Egg') self.noEgg_btn.setMinimumHeight(40);", "= bottomMost[np.argsort(bottomMost[:, 1]), :] (bl, br) = bottomMost # Use", "dealing with modifications to roi self.updatedEggROI=[] ### Still to do...", "else: self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.eggRotBBox[0,self.intDivVal] = self.originalEggRotBBox", "just use window x (topRight). 
# self.btnLayout.addWidget(self.exit_btn,2,1) # Add button", "pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # self.roi.setAngle(self.originalEggRotBBox[4], update=True) # # roi", "# Get rotated bounding box points ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]),", "self.cpROI_btn.clicked.connect(self.cpROI) # Apply copied ROI self.useCpROI_btn.clicked.connect(self.applyCopiedROI) # Assign nan to", "data self.dataForTable() # Function for modifying the table when ROI", "def cpROI(self): self.originalEggRotBBox = self.currROI_eggRotBBox self.originalEggBoxPoints = self.currROI_eggBoxPoints #============================================================================== #", "[1, 1]) # self.roi.setPen('y',width=3) # self.roi.removable # self.roi.invertible = 'True'", "of Table self.diag.table.resizeRowsToContents() #============================================================================== # Update the user interface #==============================================================================", "to do... self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) #============================================================================== # Update ROI. 
#============================================================================== def", "#self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] # # # Modified version of updateOpenCVEggROICurrEmbryo", "Euclidean distance between the # The point with the largest", "#============================================================================== def formatSequence(self,ims): # Format seq appropriately for pyqtgraph ROIs", "for dealing with modifications to roi self.updatedEggROI=[] self.roi.sigRegionChangeFinished.connect(self.updateROI) #else: #==============================================================================", ":] # topMost = ySorted[2:, :] # # Get bottom", "# Make ImageView self.diag.imv = pg.ImageView() self.diag.resize(1000,600) # Make ROI", "pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # Add handles self.roi.addRotateHandle([1,", "Get relevant sequence position and ROI. self.getSeqValsAndCurrROI() if (self.currROI_eggRotBBox[0] !=", "self.currROI_eggRotBBox[4] == -180: #self.currROI_eggRotBBox[4] = -179 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]),", "self.useCpROI_btn.clicked.connect(self.applyCopiedROI) # Assign nan to frames not containing egg self.noEgg_btn.clicked.connect(self.recordNoEgg)", "QtCore.pyqtSignal() def __init__(self, parent=None): super(eggUI, self).__init__(parent) # Make QDialog self.diag", "self.eggBoxPoints[self.intDivVal,self.withinSeqVal] else: self.divVal = self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.currROI_eggRotBBox =", "to roi # self.updatedEggROI=[] # ### Still to do... #", "'ROI approved':['No'] * len(list(self.embryoLabels))} self.tableCols = [QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels)) # Enter", "relevant video position and ROI. 
self.getSeqValsAndCurrROI() # 0 or 90", "#self.updatedEggROI = (((self.updatedEggROI[0][0]-changeX),(self.updatedEggROI[0][1]+changeY)),((max((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY))),(min((self.updatedEggROI[1][0]*changeScaleX),(self.updatedEggROI[1][1]*changeScaleY)))),self.updatedEggROI[2]+changeAngle) hh = self.roi.getHandles() hh = [self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos())", "implemented (hidden) #============================================================================== #============================================================================== # def closeEvent(self, event): # #", "for n, key in enumerate(sorted(self.tableData.keys())): horHeaders.append(key) for m, item in", "in place of ROI if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] =", "= dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1], :] self.roi", "simply else: self.eggRotBBox[0,self.intDivVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[0,self.intDivVal]", "Deal with data from the dataHandling class #============================================================================== def formatSequence(self,ims):", "self.diag.table.resizeRowsToContents() #============================================================================== # Update the user interface #============================================================================== def updateUI(self,ims,eggRotBBox,", "from showUI and updateUI. #============================================================================== def importOpenCVROIs(self,eggRotBBox, eggBoxPoints): self.eggRotBBox =", "embryo sequence ROIs if self.eggInt != 1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1]))", "1], [0, 0]) self.roi.addScaleHandle([0, 0], [1, 1]) self.roi.setPen('y',width=3) self.roi.removable self.roi.invertible", "for ROI data. 
#============================================================================== def getSeqValsAndCurrROI(self): # Calculate the indices", "self.currROI_eggRotBBox[3]]) elif self.currROI_eggRotBBox[4] == -0: #self.currROI_eggRotBBox[4] = -1 ySorted =", "# Get relevant video position and ROI. self.getSeqValsAndCurrROI() # 0", "#============================================================================== # Copy ROI on button click. #============================================================================== def applyCopiedROI(self):", "non 0,or 90 degree angles, require different of the X", "var for dealing with modifications to roi self.updatedEggROI=[] self.roi.sigRegionChangeFinished.connect(self.updateROI) #else:", "eggID defined ROIs and visualise ''' sliderUpdate = QtCore.pyqtSignal() embryoUpdate", "= QtCore.pyqtSignal() keyPressed = QtCore.pyqtSignal() def __init__(self, parent=None): super(eggUI, self).__init__(parent)", "+ \"*/\" + embryo +\"/\") self.embryoFolders.sort(key=os.path.getctime) #============================================================================== # Get image", "\"Are you sure you want to exit the program?\" #", "eggBBAng = -1 # Save updated # If more than", "eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH),", "self.diag.table.setHorizontalHeaderLabels(horHeaders) #Adjust size of Table self.diag.table.resizeRowsToContents() #============================================================================== # Update the", "Add the ROI to ImageItem #self.diag.imv.addItem(self.roi) #============================================================================== # Deal with", "# self.roi.addScaleHandle([0, 0], [1, 1]) # self.roi.setPen('y',width=3) # self.roi.removable #", "pg.ImageView() self.diag.resize(1000,600) # Make ROI self.importOpenCVROIs(eggRotBBox, eggBoxPoints) if (eggRotBBox[0][0][0] !=", "place of ROI if self.eggInt != 1234: 
self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [np.nan,", "eggBoxPoints) if (eggRotBBox[0][0][0] != 'nan'): self.createOpenCVEggROI() self.diag.imv.addItem(self.roi) # Remove buttons", "= roiChanges.getAngle() # Update ROI, either updating the previously updated", "relevant sequence position and ROI. self.getSeqValsAndCurrROI() if (self.currROI_eggRotBBox[0] != 'nan'):", "Euclidean distance between the # # The point with the", "#%% class eggUI(QDialog): ''' createOpenCVEggROI : take eggID defined ROIs", "== 0.0): # self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) #", "eggRotBBox[vidTime][3]]) # Debug # print 'no angle' else: # Random", "#============================================================================== #============================================================================== # def closeEvent(self, event): # # quit_msg =", "(im/ran) # out = out-out.min() # self.compSeq[self.diag.imv.currentIndex] = out.astype(np.uint8) #", "#============================================================================== # Update ROI. #============================================================================== def updateROI(self): #global vidTime, xyPosHandles,", "button click. #============================================================================== def cpROI(self): self.originalEggRotBBox = self.currROI_eggRotBBox self.originalEggBoxPoints =", "import re import os from PyQt5 import QtGui from PyQt5.QtCore", "iteratively when slider moved #============================================================================== #============================================================================== # def updateImage(self): #", "approrpraite formatting. checkLayout.setColumnStretch(0, 3) checkLayout.setColumnStretch(1, 1) checkLayout.setRowStretch(0, 1) checkLayout.setRowStretch(1, 3)", "updateUI. 
#============================================================================== def importOpenCVROIs(self,eggRotBBox, eggBoxPoints): self.eggRotBBox = eggRotBBox self.eggBoxPoints =", "image iteratively when slider moved #============================================================================== #============================================================================== # def updateImage(self):", "import glob import re import os from PyQt5 import QtGui", "= (im.max()-im.min())/255. # out = (im/ran) # out = out-out.min()", "self.roi.getHandles() hh = [self.roi.mapToItem(self.diag.imv.getImageItem(), h.pos()) for h in hh] #", "enumerate(sorted(self.tableData.keys())): horHeaders.append(key) for m, item in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item)", "n, key in enumerate(sorted(self.tableData.keys())): horHeaders.append(key) for m, item in enumerate(self.tableData[key]):", "#============================================================================== def applyCopiedROI(self): self.getSeqValsAndCurrROI() # Store copied ROI to embryo", "the X size # # Rectangular ROI used to enable", "== -0: eggBBAng = -1 # Save updated # If", "self.eggInt = eggInt self.embryoLabels = embryoLabels self.diag.setWindowTitle('Identify eggs') # Make", "topMost, \"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1], :] # Make ROI", "to calculate the Euclidean distance between the # The point", "#======== #============================================================================== #============================================================================== # ROI functions #============================================================================== #============================================================================== # Import", "nan to frames not containing egg self.noEgg_btn.clicked.connect(self.recordNoEgg) # Exit -", "to GridLayout. 
checkLayout.addLayout(self.btnLayout,0,5) # Format images for pyqtgraph and put", "to confirm #self.exit_btn.clicked.connect(self.closeEvent) # Connect changes in timeline so correct", "self.diag.show() # Call function to add data self.dataForTable() # Function", "eggBoxPoints.copy() #============================================================================== # Get index values for ROI data. #==============================================================================", "# (tl, tr) = topMost[np.argsort(D)[::-1], :] # # Make ROI", "np.nan, np.nan, np.nan] self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan] else: self.eggBoxPoints[0,self.intDivVal] = [np.nan,np.nan,np.nan,np.nan]", "= self.currROI_eggRotBBox self.originalEggBoxPoints = self.currROI_eggBoxPoints #============================================================================== # Assign nan to", "old ROI if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get relevant video", "dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1], :] self.roi =", "point. 
#if len(self.updatedEggROI) == 0: self.updatedEggROI = (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle) #else: #self.updatedEggROI", "ROI if self.eggInt != 1234: self.eggRotBBox[self.intDivVal,self.withinSeqVal] = [np.nan, np.nan, np.nan,", "QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) # # if reply == QtGui.QMessageBox.Yes: # #event.accept()", "to frames not containing egg self.noEgg_btn.clicked.connect(self.recordNoEgg) # Exit - prompt", "# Make ROI self.importOpenCVROIs(eggRotBBox, eggBoxPoints) if (eggRotBBox[0][0][0] != 'nan'): self.createOpenCVEggROI()", "current frame if self.eggInt != 1234: self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal", "# self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.originalEggRotBBox[2], self.originalEggRotBBox[3]]) # self.roi.setAngle(self.originalEggRotBBox[4], update=True)", "Get bottom most, and top most sorted corner points #", "= bottomMost # Use bottom-left coordinate as anchor to calculate", "and put in ImageView # self.formatSequence(ims) self.imImport() self.diag.imv.setImage(self.compSeq) # Add", "# Calculate the indices for current frame if self.eggInt !=", "button click. 
#============================================================================== def applyCopiedROI(self): self.getSeqValsAndCurrROI() # Store copied ROI", "bottomMost # # Use bottom-left coordinate as anchor to calculate", "self.originalEggRotBBox[3]]) # self.roi.setAngle(self.originalEggRotBBox[4], update=True) # # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]],", "bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # # Add handles # self.roi.addRotateHandle([1, 0],[0.5,0.5])", "len(list(self.embryoLabels)) # Enter data onto Table horHeaders = [] for", "self.diag.resize(1000,600) # Make ROI self.importOpenCVROIs(eggRotBBox, eggBoxPoints) if (eggRotBBox[0][0][0] != 'nan'):", "self.roi.invertible = 'True' # Make var for dealing with modifications", "self.roi = pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) else: # Get rotated", "ROI. # self.getSeqValsAndCurrROI() # # Get rotated bounding box points", "#============================================================================== # Generate data for populating the embryo/approveROI table. 
#==============================================================================", "#if len(self.updatedEggROI) == 0: self.updatedEggROI = (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle) #else: #self.updatedEggROI =", "recordNoEgg(self): # Remove ROI self.diag.imv.removeItem(self.roi) # Store nans in place", "elif eggBBAng == -0: eggBBAng = -1 # Save updated", "# self.roi.addRotateHandle([1, 0],[0.5,0.5]) # self.roi.addRotateHandle([0, 1], [0.5,0.5]) # self.roi.addScaleHandle([1, 1],", "* import pyqtgraph as pg #from PyQt4.Qt import * #%%", "self.roi.removable # self.roi.invertible = 'True' # # Make var for", "populating the embryo/approveROI table. #============================================================================== def dataForTable(self): self.tableData = {'Embryo':list(self.embryoLabels),", "# Highlight first row self.diag.table.selectRow(0) # Make layout checkLayout =", "horHeaders.append(key) for m, item in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) newitem.setBackground(QtGui.QColor(0,0,100,120))", "(im.max()-im.min())/255. # out = (im/ran) # out = out-out.min() #", "handles for tracking user chagnges. if (self.currROI_eggRotBBox[4] == -90.0) |", "current ROI if 'No Egg' button clicked #============================================================================== def recordNoEgg(self):", "size # Rectangular ROI used to enable more easy handling", "self.diag.imv = pg.ImageView() self.btn_save = QPushButton('Save', self) #============================================================================== # #==============================================================================", "approve ROI button clicked. 
#============================================================================== def updateTable(self): self.tableData['ROI approved'][self.diag.table.currentRow()] =", "- prompt user to confirm #self.exit_btn.clicked.connect(self.closeEvent) # Connect changes in", "column self.diag.table.setRowCount(int(len(self.embryoLabels))) self.diag.table.setColumnCount(2) # Highlight first row self.diag.table.selectRow(0) # Make", "self.currROI_eggBoxPoints #============================================================================== # Assign nan to current ROI if 'No", "distance will be our bottom-right point # D = dist.cdist(bl[np.newaxis],", "-0: #self.currROI_eggRotBBox[4] = -1 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] #", "Get rotated bounding box points # ySorted = self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]),", "#============================================================================== # ROI functions #============================================================================== #============================================================================== # Import OpenCV determined", "np.nan] #============================================================================== # Copy ROI on button click. #============================================================================== def", "not implemented, just use window x (topRight). 
# self.btnLayout.addWidget(self.exit_btn,2,1) #", "= pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) else: # Get rotated bounding", "of updateOpenCVEggROICurrEmbryo # # Remove previous # self.diag.imv.removeItem(self.roi) # #", "updating the previously updated or taking the unaltered ROI from", "# #============================================================================== #============================================================================== # #self.originalEggRotBBox = eggRotBBox.copy() # #self.originalEggBoxPoints =", "eggBBAng = -89 elif eggBBAng == -180: eggBBAng = -179", "QGridLayout() # Deal with stretching for approrpraite formatting. checkLayout.setColumnStretch(0, 3)", "self.roi.setPen('y',width=3) self.roi.removable self.roi.invertible = 'True' # Make var for dealing", "eggInt): self.eggInt = eggInt self.embryoLabels = embryoLabels self.diag.setWindowTitle('Identify eggs') #", "self.eggRotBBox[0,self.intDivVal] = [eggBBX, eggBBY, eggBBW, eggBBH, eggBBAng] self.eggBoxPoints[0,self.intDivVal] = cv2.boxPoints(((eggBBX,", "in enumerate(self.tableData[key]): newitem = QtGui.QTableWidgetItem(item) newitem.setBackground(QtGui.QColor(0,0,100,120)) self.diag.table.setItem(m, n, newitem) #", "- self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.eggRotBBox[self.intDivVal,self.withinSeqVal] = self.originalEggRotBBox self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = self.originalEggBoxPoints else: self.divVal", "-90: eggBBAng = -89 elif eggBBAng == -180: eggBBAng =", "program?\" # reply = QtGui.QMessageBox.question(self, 'Message', # quit_msg, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)", "#self.currROI_eggRotBBox[4] = -179 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get", "# 0 or 90 degree angles seem very buggy. 
Shift", "# reply = QtGui.QMessageBox.question(self, 'Message', # quit_msg, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) #", "if self.currROI_eggRotBBox[4] == -90: #self.currROI_eggRotBBox[4] = -89 # Get rotated", "def getSeqValsAndCurrROI(self): # Calculate the indices for current frame if", "starting point. #if len(self.updatedEggROI) == 0: self.updatedEggROI = (((self.currROI_eggRotBBox[0]-changeX),(self.currROI_eggRotBBox[1]+changeY)),((max((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY))),(min((self.currROI_eggRotBBox[3]*changeScaleX),(self.currROI_eggRotBBox[2]*changeScaleY)))),self.currROI_eggRotBBox[4]+changeAngle) #else:", "= dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] # (tl, tr) = topMost[np.argsort(D)[::-1], :]", "#============================================================================== # Assign nan to current ROI if 'No Egg'", "#============================================================================== #============================================================================== # Close button - not implemented (hidden) #==============================================================================", "sys import cv2 import pandas as pd from PyQt5.Qt import", "!= 'nan'): self.createOpenCVEggROI() self.diag.imv.addItem(self.roi) # Remove buttons from ImageView widget", "tracking user chagnges. if (self.currROI_eggRotBBox[4] == -90.0) | (self.currROI_eggRotBBox[4] ==", "# app.quit() # else: # event.ignore() # #============================================================================== #============================================================================== #", "position and ROI. 
# self.getSeqValsAndCurrROI() # # Get rotated bounding", "self.approveROI_btn.clicked.connect(self.updateTable) # Copy current ROI self.cpROI_btn.clicked.connect(self.cpROI) # Apply copied ROI", "self.tableCols = [QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels)) # Enter data onto Table horHeaders", "import QtCore, QtGui import numpy as np from scipy.spatial import", "[-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) #", "= pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # # Add handles #", "#============================================================================== # #============================================================================== def showUI(self,ims,eggRotBBox, eggBoxPoints, embryoLabels, eggInt): self.eggInt =", "# quit_msg = \"Are you sure you want to exit", "pg.ImageView() self.btn_save = QPushButton('Save', self) #============================================================================== # #============================================================================== def showUI(self,ims,eggRotBBox,", "= (im.max()-im.min())/255. 
out = (im/ran) out = out-out.min() self.compSeq[int(f)] =", "horHeaders = [] for n, key in enumerate(sorted(self.tableData.keys())): horHeaders.append(key) for", "np.nan, np.nan, np.nan, np.nan] #============================================================================== # Copy ROI on button", "-179 elif eggBBAng == -0: eggBBAng = -1 # Save", "self.btnLayout.addWidget(self.approveROI_btn,1,0) # Exit button not implemented, just use window x", "self.getSeqValsAndCurrROI() self.updateOpenCVEggROINewEmbryo() # Add the ROI to ImageItem #self.diag.imv.addItem(self.roi) #==============================================================================", "layout checkLayout.addWidget(self.diag.imv,0,0,2,2) checkLayout.addWidget(self.diag.table,1,5) # Apply layout self.diag.setLayout(checkLayout) # Make buttons", "<gh_stars>1-10 from pyqtgraph.Qt import QtCore, QtGui import numpy as np", "prompt user to confirm #self.exit_btn.clicked.connect(self.closeEvent) # Connect changes in timeline", "handle positions self.xyPosHandles =[] for h in hh: self.xyPosHandles.append([h.x(),h.y()]) (eggBBX,", "self.eggBoxPoints[self.intDivVal,self.withinSeqVal] = cv2.boxPoints(((eggBBX, eggBBY), (eggBBW, eggBBH), eggBBAng)) # Otherwise just", "# Add button layout to GridLayout. checkLayout.addLayout(self.btnLayout,0,5) # Format images", "self.eggRotBBox[0,self.intDivVal] self.currROI_eggBoxPoints = self.eggBoxPoints[0,self.intDivVal] #============================================================================== # Generate a pyqtgraph ROI,", "f in range(len(self.eggUIimPaths)): im = cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH) ran = (im.max()-im.min())/255. out", "points # ySorted = self.originalEggBoxPoints[np.argsort(self.originalEggBoxPoints[:, 1]), :] # # Get", "self.btnLayout.addWidget(self.noEgg_btn,1,1) self.btnLayout.addWidget(self.approveROI_btn,1,0) # Exit button not implemented, just use window", "is created and displayed. 
self.diag.imv.timeLine.sigPositionChanged.connect(self.updateOpenCVEggROICurrEmbryo) #self.diag.keyPressEvent(self.keyPressEvent) #============================================================================== # Generate data", "between the # # The point with the largest distance", "= -1 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]), :] # Get bottom", "self.exit_btn.setMinimumHeight(40); # Make button layout self.btnLayout = QGridLayout() self.btnLayout.addWidget(self.cpROI_btn,0,0) self.btnLayout.addWidget(self.useCpROI_btn,0,1)", "more easy handling of corner handles for tracking user chagnges.", "from scipy.spatial import distance as dist import glob import re", "#============================================================================== def updateOpenCVEggROINewEmbryo(self): # Remove old ROI if (hasattr(self, 'roi')):", "cpROI(self): self.originalEggRotBBox = self.currROI_eggRotBBox self.originalEggBoxPoints = self.currROI_eggBoxPoints #============================================================================== # Assign", "# Get image #============================================================================== def imImport(self): for f in range(len(self.eggUIimPaths)):", "#============================================================================== # def closeEvent(self, event): # # quit_msg = \"Are", "= self.eggRotBBox[self.intDivVal,self.withinSeqVal] # #self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] # # # Modified", "relevant video position and ROI. 
# self.getSeqValsAndCurrROI() # # Get", "1]), :] (bl, br) = bottomMost # Use bottom-left coordinate", "self.diag.table.setItem(m, n, newitem) # Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) # Adjust size", "# self.diag.imv.show() # #======== #============================================================================== #============================================================================== # ROI functions #==============================================================================", "self.embryo = embryo self.embryoFolders = glob.glob(parentPath + \"*/\" + embryo", "newitem = QtGui.QTableWidgetItem(item) self.diag.table.setItem(m, n, newitem) newitem.setBackground(self.tableCols[m]) #Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders)", "as a starting point. #if len(self.updatedEggROI) == 0: self.updatedEggROI =", "just on the first column self.diag.table.setRowCount(int(len(self.embryoLabels))) self.diag.table.setColumnCount(2) # Highlight first", "self.currROI_eggRotBBox = self.eggRotBBox[0,self.intDivVal] self.currROI_eggBoxPoints = self.eggBoxPoints[0,self.intDivVal] #============================================================================== # Generate a", "eggRotBBox[vidTime][3]]) # else: # # Random angle ROIs # self.roi", "im = cv2.imread(self.eggUIimPaths[self.diag.imv.currentIndex],cv2.IMREAD_ANYDEPTH) # ran = (im.max()-im.min())/255. # out =", "the indices for current frame if self.eggInt != 1234: self.divVal", "on button click. 
#============================================================================== def cpROI(self): self.originalEggRotBBox = self.currROI_eggRotBBox self.originalEggBoxPoints", "* len(list(self.embryoLabels))} self.tableCols = [QtGui.QColor(0,0,100,120)]* len(list(self.embryoLabels)) # Enter data onto", "# The point with the largest distance will be our", "Close button - not implemented (hidden) #============================================================================== #============================================================================== # def", "# Sets different alignment data just on the first column", "bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :] # (bl, br) = bottomMost", "self.roi = pg.ROI([bottomMost[0][0], bottomMost[0][1]], [-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # roi", "# event.ignore() # #============================================================================== #============================================================================== # #self.originalEggRotBBox = eggRotBBox.copy() #", "#============================================================================== def createOpenCVEggROI(self): # Get relevant sequence position and ROI.", "bottom most bottomMost = bottomMost[np.argsort(bottomMost[:, 1]), :] (bl, br) =", "for dealing with modifications to roi self.updatedEggROI=[] ### Still to", "# Save updated # If more than one frame eggID", "changeX, changeY, changeScaleX, changeScaleY, changeAngle # Get changes to ROI", "= cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH) ran = (im.max()-im.min())/255. out = (im/ran) out =", "ROI on button click. #============================================================================== def cpROI(self): self.originalEggRotBBox = self.currROI_eggRotBBox", "PyQt4.Qt import * #%% class eggUI(QDialog): ''' createOpenCVEggROI : take", "Shift to 1 and 89 as a bodge fix. 
if", "eggBBH), eggBBAng)) # Otherwise just save simply else: self.eggRotBBox[0,self.intDivVal] =", "= int(self.divVal) self.withinSeqVal = int((self.divVal - self.intDivVal)*len(self.eggRotBBox[self.intDivVal])) self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal]", "self.intDivVal = int(self.divVal) self.currROI_eggRotBBox = self.eggRotBBox[0,self.intDivVal] self.currROI_eggBoxPoints = self.eggBoxPoints[0,self.intDivVal] #==============================================================================", "values for ROI data. #============================================================================== def getSeqValsAndCurrROI(self): # Calculate the", ":] topMost = ySorted[2:, :] # Get bottom most bottomMost", "ROI - note non 0,or 90 degree angles, require different", "self.diag.table.setShowGrid(True) self.diag.table.setHorizontalHeaderLabels(['Embryo', 'Sorted']) # Sets different alignment data just on", "= pg.ROI([bl[0], bl[1]], [self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]],", "# roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # roi =", "applyCopiedROI(self): self.getSeqValsAndCurrROI() # Store copied ROI to embryo sequence ROIs", "# Add handles self.roi.addRotateHandle([1, 0],[0.5,0.5]) self.roi.addRotateHandle([0, 1], [0.5,0.5]) self.roi.addScaleHandle([1, 1],", "self.currROI_eggRotBBox[4] == -0: #self.currROI_eggRotBBox[4] = -1 ySorted = self.currROI_eggBoxPoints[np.argsort(self.currROI_eggBoxPoints[:, 1]),", "QtGui.QPushButton('&Use Copied ROI') self.useCpROI_btn.setMinimumHeight(40); self.noEgg_btn = QtGui.QPushButton('&No Egg') self.noEgg_btn.setMinimumHeight(40); self.approveROI_btn", "# self.updatedEggROI=[] # ### Still to do... 
# self.diag.imv.addItem(self.roi) #", "# out = (im/ran) # out = out-out.min() # self.compSeq[self.diag.imv.currentIndex]", "#else: #============================================================================== # Update the ROI for current embryo. #==============================================================================", "3) # Add to layout checkLayout.addWidget(self.diag.imv,0,0,2,2) checkLayout.addWidget(self.diag.table,1,5) # Apply layout", "index values for ROI data. #============================================================================== def getSeqValsAndCurrROI(self): # Calculate", "== -90.0) | (self.currROI_eggRotBBox[4] == -0.0)| (self.currROI_eggRotBBox[4] == 0.0): self.roi", "ROI scale, angle and position roiChanges = self.roi.getGlobalTransform() changeX =", "Update ROI. #============================================================================== def updateROI(self): #global vidTime, xyPosHandles, ellipse, changeAngle,", "#============================================================================== # Get folders for a particular embryo #============================================================================== def", "# #self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] # # # Modified version of", "checkLayout.setRowStretch(0, 1) checkLayout.setRowStretch(1, 3) # Add to layout checkLayout.addWidget(self.diag.imv,0,0,2,2) checkLayout.addWidget(self.diag.table,1,5)", "- not implemented (hidden) #============================================================================== #============================================================================== # def closeEvent(self, event):", "self.diag.table.resizeColumnsToContents() #============================================================================== # Update table when approve ROI button clicked.", "self.getSeqValsAndCurrROI() # Store copied ROI to embryo sequence ROIs if", "self.currROI_eggRotBBox = self.eggRotBBox[self.intDivVal,self.withinSeqVal] 
self.currROI_eggBoxPoints = self.eggBoxPoints[self.intDivVal,self.withinSeqVal] else: self.divVal = self.diag.imv.currentIndex", "#Add Header self.diag.table.setHorizontalHeaderLabels(horHeaders) #Adjust size of Table self.diag.table.resizeRowsToContents() #============================================================================== #", "X size # # Rectangular ROI used to enable more", "Remove buttons from ImageView widget self.diag.imv.ui.roiBtn.hide() self.diag.imv.ui.menuBtn.hide() # Make tableview", "self.btnLayout = QGridLayout() self.btnLayout.addWidget(self.cpROI_btn,0,0) self.btnLayout.addWidget(self.useCpROI_btn,0,1) self.btnLayout.addWidget(self.noEgg_btn,1,1) self.btnLayout.addWidget(self.approveROI_btn,1,0) # Exit button", "self.approveROI_btn = QtGui.QPushButton('&Approve ROIs') self.approveROI_btn.setMinimumHeight(40); self.exit_btn = QtGui.QPushButton('Exit') self.exit_btn.setMinimumHeight(40); #", "= 'True' # Make var for dealing with modifications to", "else: # # Random angle ROIs # self.roi = pg.ROI([bottomMost[0][0],", "[-self.currROI_eggRotBBox[2], self.currROI_eggRotBBox[3]]) self.roi.setAngle(self.currROI_eggRotBBox[4], update=True) # roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [-eggRotBBox[vidTime][2],", "self.diag.imv.show() # #======== #============================================================================== #============================================================================== # ROI functions #============================================================================== #==============================================================================", "dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] # (tl, tr) = topMost[np.argsort(D)[::-1], :] #", "self.divVal = self.diag.imv.currentIndex/float(len(self.eggRotBBox[1])) self.intDivVal = int(self.divVal) self.withinSeqVal = int((self.divVal -", "self.diag.imv.addItem(self.roi) self.roi.sigRegionChangeFinished.connect(self.updateROI) 
#============================================================================== # Update ROI for new embryo. #==============================================================================", "sliderUpdate = QtCore.pyqtSignal() embryoUpdate = QtCore.pyqtSignal() keyPressed = QtCore.pyqtSignal() def", "for f in range(len(self.eggUIimPaths)): im = cv2.imread(self.eggUIimPaths[f],cv2.IMREAD_ANYDEPTH) ran = (im.max()-im.min())/255.", "coordinate as anchor to calculate the Euclidean distance between the", "self.diag.imv.currentIndex self.intDivVal = int(self.divVal) self.eggRotBBox[0,self.intDivVal] = self.originalEggRotBBox self.eggBoxPoints[0,self.intDivVal] = self.originalEggBoxPoints", "roiChanges = self.roi.getGlobalTransform() changeX = -roiChanges.getTranslation()[0] changeY = roiChanges.getTranslation()[1] changeScaleX", "eggBoxPoints, embryoLabels, eggInt): self.eggInt = eggInt self.embryoLabels = embryoLabels self.diag.setWindowTitle('Identify", "Remove previous if (hasattr(self, 'roi')): self.diag.imv.removeItem(self.roi) # Get relevant video", "# Make button layout self.btnLayout = QGridLayout() self.btnLayout.addWidget(self.cpROI_btn,0,0) self.btnLayout.addWidget(self.useCpROI_btn,0,1) self.btnLayout.addWidget(self.noEgg_btn,1,1)", "= QPushButton('Save', self) #============================================================================== # #============================================================================== def showUI(self,ims,eggRotBBox, eggBoxPoints, embryoLabels,", "# roi = pg.EllipseROI([bottomMost[0][0], bottomMost[0][1]], [eggRotBBox[vidTime][2], eggRotBBox[vidTime][3]]) # Debug #", "in range(len(self.tSeqd)): self.tSeqd[l] = ims[l].T #============================================================================== # Get folders for", "QPushButton('Save', self) #============================================================================== # #============================================================================== def 
showUI(self,ims,eggRotBBox, eggBoxPoints, embryoLabels, eggInt):", "global parentPath, vidTime self.diag.setWindowTitle('Identify eggs') self.diag.imv = pg.ImageView() self.btn_save =", "QtGui.QDialog() global parentPath, vidTime self.diag.setWindowTitle('Identify eggs') self.diag.imv = pg.ImageView() self.btn_save", "point D = dist.cdist(bl[np.newaxis], topMost, \"euclidean\")[0] (tl, tr) = topMost[np.argsort(D)[::-1],", "#============================================================================== # Update the user interface #============================================================================== def updateUI(self,ims,eggRotBBox, eggBoxPoints):", "ROIs') self.approveROI_btn.setMinimumHeight(40); self.exit_btn = QtGui.QPushButton('Exit') self.exit_btn.setMinimumHeight(40); # Make button layout", "# Store nans in place of ROI if self.eggInt !=", "def updateImage(self): # self.getSeqValsAndCurrROI() # #self.UI.compSeq[e*len(self.eggIDIms):(e*len(self.eggIDIms)+len(self.eggIDIms))] = self.seq # #self.UI.comp(self.imImport(self.diag.imv.currentIndex()))", "ROI self.cpROI_btn.clicked.connect(self.cpROI) # Apply copied ROI self.useCpROI_btn.clicked.connect(self.applyCopiedROI) # Assign nan", "ySorted[:2, :] topMost = ySorted[2:, :] # Get bottom most", "QtGui import numpy as np from scipy.spatial import distance as", "from PyQt5 import QtGui from PyQt5.QtCore import * from PyQt5.QtGui", "copied ROI to embryo sequence ROIs if self.eggInt != 1234:" ]
[ "contador in range(tamanho-1, -1, -1): inverso += frase[contador] print('O inverso", "= input('Digite uma frase: ').upper().strip().replace(' ', '') tamanho = int(len(frase))", "simples: # inverso = frase[::-1] for contador in range(tamanho-1, -1,", "inverso += frase[contador] print('O inverso de {} é {}'.format(frase, inverso))", "é {}'.format(frase, inverso)) if frase == inverso: print('Temos um palíndromo!')", "== inverso: print('Temos um palíndromo!') else: print('A frase digitada não", "um palíndromo!') else: print('A frase digitada não é um palíndromo!')", "inverso)) if frase == inverso: print('Temos um palíndromo!') else: print('A", "uma frase: ').upper().strip().replace(' ', '') tamanho = int(len(frase)) inverso =", "').upper().strip().replace(' ', '') tamanho = int(len(frase)) inverso = '' #Opção", "frase == inverso: print('Temos um palíndromo!') else: print('A frase digitada", "= frase[::-1] for contador in range(tamanho-1, -1, -1): inverso +=", "inverso de {} é {}'.format(frase, inverso)) if frase == inverso:", "{} é {}'.format(frase, inverso)) if frase == inverso: print('Temos um", "tamanho = int(len(frase)) inverso = '' #Opção mais simples: #", "= '' #Opção mais simples: # inverso = frase[::-1] for", "range(tamanho-1, -1, -1): inverso += frase[contador] print('O inverso de {}", "for contador in range(tamanho-1, -1, -1): inverso += frase[contador] print('O", "input('Digite uma frase: ').upper().strip().replace(' ', '') tamanho = int(len(frase)) inverso", "inverso = frase[::-1] for contador in range(tamanho-1, -1, -1): inverso", "de {} é {}'.format(frase, inverso)) if frase == inverso: print('Temos", "frase: ').upper().strip().replace(' ', '') tamanho = int(len(frase)) inverso = ''", "print('O inverso de {} é {}'.format(frase, inverso)) if frase ==", "frase[::-1] for contador in range(tamanho-1, -1, -1): inverso += frase[contador]", "+= frase[contador] print('O inverso de {} é {}'.format(frase, inverso)) if", "int(len(frase)) inverso = '' #Opção mais 
simples: # inverso =", "{}'.format(frase, inverso)) if frase == inverso: print('Temos um palíndromo!') else:", "-1): inverso += frase[contador] print('O inverso de {} é {}'.format(frase,", "print('Temos um palíndromo!') else: print('A frase digitada não é um", "if frase == inverso: print('Temos um palíndromo!') else: print('A frase", "'') tamanho = int(len(frase)) inverso = '' #Opção mais simples:", "#Opção mais simples: # inverso = frase[::-1] for contador in", "-1, -1): inverso += frase[contador] print('O inverso de {} é", "mais simples: # inverso = frase[::-1] for contador in range(tamanho-1,", "'' #Opção mais simples: # inverso = frase[::-1] for contador", "frase[contador] print('O inverso de {} é {}'.format(frase, inverso)) if frase", "# inverso = frase[::-1] for contador in range(tamanho-1, -1, -1):", "', '') tamanho = int(len(frase)) inverso = '' #Opção mais", "in range(tamanho-1, -1, -1): inverso += frase[contador] print('O inverso de", "= int(len(frase)) inverso = '' #Opção mais simples: # inverso", "frase = input('Digite uma frase: ').upper().strip().replace(' ', '') tamanho =", "inverso = '' #Opção mais simples: # inverso = frase[::-1]", "inverso: print('Temos um palíndromo!') else: print('A frase digitada não é" ]
[ "expiry 2 years, volatility 30% \"\"\" pricer = BinomialTreePricer(steps=100) option", "OptionType, Option class BinomialTreeTestCase(TestCase): def test_basic(self): \"\"\"European option, spot price", "50, 52, 0.05, 2, 0.3) result = pricer.price_option(option) self.assertEqual(6.7781, result)", "unittest import TestCase from options.pricing.binomial_trees import BinomialTreePricer from options.option import", "volatility 30% \"\"\" pricer = BinomialTreePricer(steps=100) option = Option(OptionType.PUT, 50,", "import OptionType, Option class BinomialTreeTestCase(TestCase): def test_basic(self): \"\"\"European option, spot", "option, spot price 50, strike price 52, risk free interest", "options.option import OptionType, Option class BinomialTreeTestCase(TestCase): def test_basic(self): \"\"\"European option,", "price 50, strike price 52, risk free interest rate 5%", "options.pricing.binomial_trees import BinomialTreePricer from options.option import OptionType, Option class BinomialTreeTestCase(TestCase):", "from options.option import OptionType, Option class BinomialTreeTestCase(TestCase): def test_basic(self): \"\"\"European", "import TestCase from options.pricing.binomial_trees import BinomialTreePricer from options.option import OptionType,", "option = Option(OptionType.PUT, 50, 52, 0.05, 2, 0.3) result =", "BinomialTreePricer from options.option import OptionType, Option class BinomialTreeTestCase(TestCase): def test_basic(self):", "from unittest import TestCase from options.pricing.binomial_trees import BinomialTreePricer from options.option", "free interest rate 5% expiry 2 years, volatility 30% \"\"\"", "TestCase from options.pricing.binomial_trees import BinomialTreePricer from options.option import OptionType, Option", "from options.pricing.binomial_trees import BinomialTreePricer from options.option import OptionType, Option class", "class BinomialTreeTestCase(TestCase): def test_basic(self): \"\"\"European option, spot price 50, strike", "30% \"\"\" 
pricer = BinomialTreePricer(steps=100) option = Option(OptionType.PUT, 50, 52,", "price 52, risk free interest rate 5% expiry 2 years,", "def test_basic(self): \"\"\"European option, spot price 50, strike price 52,", "BinomialTreeTestCase(TestCase): def test_basic(self): \"\"\"European option, spot price 50, strike price", "52, risk free interest rate 5% expiry 2 years, volatility", "5% expiry 2 years, volatility 30% \"\"\" pricer = BinomialTreePricer(steps=100)", "spot price 50, strike price 52, risk free interest rate", "Option class BinomialTreeTestCase(TestCase): def test_basic(self): \"\"\"European option, spot price 50,", "interest rate 5% expiry 2 years, volatility 30% \"\"\" pricer", "BinomialTreePricer(steps=100) option = Option(OptionType.PUT, 50, 52, 0.05, 2, 0.3) result", "risk free interest rate 5% expiry 2 years, volatility 30%", "strike price 52, risk free interest rate 5% expiry 2", "\"\"\"European option, spot price 50, strike price 52, risk free", "years, volatility 30% \"\"\" pricer = BinomialTreePricer(steps=100) option = Option(OptionType.PUT,", "pricer = BinomialTreePricer(steps=100) option = Option(OptionType.PUT, 50, 52, 0.05, 2,", "test_basic(self): \"\"\"European option, spot price 50, strike price 52, risk", "import BinomialTreePricer from options.option import OptionType, Option class BinomialTreeTestCase(TestCase): def", "2 years, volatility 30% \"\"\" pricer = BinomialTreePricer(steps=100) option =", "\"\"\" pricer = BinomialTreePricer(steps=100) option = Option(OptionType.PUT, 50, 52, 0.05,", "50, strike price 52, risk free interest rate 5% expiry", "= Option(OptionType.PUT, 50, 52, 0.05, 2, 0.3) result = pricer.price_option(option)", "= BinomialTreePricer(steps=100) option = Option(OptionType.PUT, 50, 52, 0.05, 2, 0.3)", "Option(OptionType.PUT, 50, 52, 0.05, 2, 0.3) result = pricer.price_option(option) self.assertEqual(6.7781,", "rate 5% expiry 2 years, volatility 30% \"\"\" pricer =" ]
[ "click1 == 0: click1 = 0 # global base #", "beep\") play_normal_beep() # pass # print(time.time()*1000) # print() def real_time():", "= False did_change = False count = 0 ig =", "print(metronome_interval) # metronome_time = program_time - metronome_start_time if metronome_time >=", "0 update_split() return stage += 1 ind = 0 update_split()", "def write_to_log(text): pass # log_dir = Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\") # log_fil =", "did_change = True # print(latest + \"\\nTime: \" + run_time)", "= False metronome_active = False metronome_beats = int(data2['metronome_beats']) listener =", "1: rt2 = time.time() real_time = rt2 - rt rtc", "return False # mouse.Listener.stop(listener) # print(\"Right Click Detected (pressed)\") with", "True), ([ \"Savannah\", \"Desert\", \"Plains\", \"Other\" ], False), ([ \"0-15\",", "= json.load(json_file) try: amount = data['stats']['minecraft:custom']['minecraft:play_one_minute'] except: amount = data['stat.playOneMinute']", "0 metronome_armed = False metronome_running = False metronome_active = False", "did_change # print(\"-------------------------\") if data2['1.7+'] == 'false': try: global cur_fil", "metronome_armed = False metronome_running = False metronome_active = False metronome_beats", "[] # self.attempts = [] # # convert actions to", "think this stuff is worth it, you can buy me", "'true': greeting3 = tk.Label(fg=data2['counter_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text3) greeting3.pack() # bg.gbind(data2['increment'],", "= False running_path = Path.cwd() NUM_CHARS = 11 system_type =", "# base = amount2 def right_click(): global click1 global click2", "len(item[0]): ind = 0 else: ind = 0 update_split() def", "with open(json_file) as json_file: data2 = json.load(json_file) if data2['borderless'] ==", "# # class Category: # def __init__(): # self.actions =", "= 1 # class Attempt: stage = 0 ind =", "os.chdir(latest + '/stats/') else: os.chdir(latest + '\\\\stats\\\\') json_file = 
glob.glob('*.json')", "METRONOME CODE ''' ''' Metronome mouse listener ''' def exit_handler():", "window = tk.Tk() # bg = BindGlobal(widget=window) window.text = tk.StringVar()", "window.text3.set(text_str) window.after(rta_update, update_count) # def update_split() def on_press(event): left_click() def", "1 click1 = 1 # print(float(amount2)) # print(\"hehe\") global base", "== type([]): text_str = text_str[ind] window.text4.set(text_str) def reset_split(): global ind,", "str(datetime.timedelta(seconds=real_time)) # rt = float(amount2) - float(base) # rtc =", "data2['enable_metronome'] == 'true': start_metronome(None) if \"arm_metronome\" in txt: metronome_armed =", "write_to_log(\"reset {}\".format(str(amount2-base))) base = amount2 def increment_counter(): global count count", "global metronome_interval global metronome_running if data2['has_metronome_preset'] == 'true': play_metronome_preset() metronome_running", "0 def get_time(): global last_amount global old_version global amount2 global", "= tk.Label(fg=data2['split_color'], bg=data2['bg_color'], font=split_font, textvariable=window.text4) greeting4.pack() # bg.gbind(data2['cycle'], cycle) #", "open(json_file) as json_file: data2 = json.load(json_file) if data2['borderless'] == 'true':", "metronome_running = False return # print(metronome_time) # print(metronome_interval) # print(time.time()*1000)", "= 0 base_update = int(data2['base_update']) rta_update = int(data2['rta_update']) * base_update", "json_file[0] with open(timer) as json_file: data = json.load(json_file) try: amount", "continuously read from input file every 10ms # when you", "click2 == 1: click1 = 0 click2 = 0 elif", "+ '\\\\stats\\\\') json_file = glob.glob('*.json') timer = json_file[0] with open(timer)", "''' def exit_handler(): global listener mouse.Listener.stop(listener) window.quit() atexit.register(exit_handler) def listen_for_right_click():", "os.listdir(directory)], key=os.path.getmtime) if system_type == \"Linux\" or system_type == 
\"Darwin\":", "some day, and you think this stuff is worth it,", "works for the window detecting right click ''' # window.bind(data2['start_metronome'],", "#If we meet some day, and you think this stuff", "return '0:00:00.000' else: try: latest = max([os.path.join(directory,d) for d in", "retain this notice you can do whatever you want with", "start_time = round(time.time()*1000) - base_time do_metronome_action() end_time = round(time.time()*1000) -", "data[\"stats-change\"]: if \"1100\" in item: amount = item[\"1100\"] # print(amount)", "# log_fil.write(str(text)+\"\\n\") def left_click(): global click1 if click1 == 1:", "True if \"pause_timer\" in txt: left_click() if \"start_timer\" in txt:", "ind == len(item[0]): ind = 0 else: ind = 0", "metronome_time % metronome_interval == 0: if (metronome_time % (metronome_interval*4)) ==", "1 stage = 0 reset_split() return '0:00:00.000' elif click1 ==", "do_metronome_action() if click1 == 1: window.text.set(real_time()) elif click1 == 0:", "(\"Tower Build Finished\", True), (\"Tower Leave\", True), (\"Enter Stronghold\", True),", "1: click1 = 0 elif click1 == 0: click1 =", "# bg = BindGlobal(widget=window) window.text = tk.StringVar() window.text2 = tk.StringVar()", "= copy.deepcopy(rsg) update_split() def cycle(event): global ind, stage ind +=", "= os.path.expanduser(data2['linux_saves']) elif system_type == 'Darwin': directory = os.path.expanduser(data2['mac_saves']) elif", "str(count) text_str = \"\" for i in range(0, int(NUM_CHARS/2)): text_str", "print(data2['enable_metronome']) if data2['enable_metronome'] == 'true': start_metronome(None) if \"arm_metronome\" in txt:", "ind, stage, cur_stages ind = 0 stage = 0 cur_stages", "print(timer_file) data = json.load(timer_file) for item in data[\"stats-change\"]: if \"1100\"", "stage, ind item = cur_stages[stage] if item[1]: if type(item[0]) ==", "base_update = int(data2['base_update']) rta_update = int(data2['rta_update']) * base_update metronome_bpm =", "1: if old_version 
== True and stage == 0: ig", "# metronome_time = program_time - metronome_start_time if metronome_time >= metronome_interval", "run_time) last_amount = amount ig = 0 return run_time[:-3] except:", "data2['font_name'] rta_font_size = data2['rta_font_size'] igt_font_size = data2['igt_font_size'] font_modifiers = data2['font_modifiers']", "0 time_count = 0 rsg = [ (\"World Created\", True),", "item = cur_stages[stage] if type(item[0]) == type([]): if ind ==", "Sound playing code ''' def play_file_named(str_name): playsound((running_path / str_name).as_posix(), block", "cur_fil: cur_fil = latest world_base_time = amount # print(\"world base", "input_fil = Path(\"/Users/sharpieman20/MCtimer/MCtimer\") / \"input.txt\" # continuously read from input", "True) ] cur_stages = {} json_file = 'mct_config.json' with open(json_file)", "latest != cur_fil: cur_fil = latest world_base_time = amount #", "= True if \"pause_timer\" in txt: left_click() if \"start_timer\" in", "detecting right click ''' # window.bind(data2['start_metronome'], start_metronome) #window.bind(\"<Button-1>\", clicked) #window.bind(\"<Button-3>\",", "1 return '0:00:00.000' def window2(): font_name = data2['font_name'] rta_font_size =", "update_time() if metronome_armed or time_count % 20 == 0: check_input()", "window.attributes('-topmost', True) window.overrideredirect(data2['borderless']) window.geometry(data2['window_pos']) window.mainloop() def update_time(): global rt global", "from input file every 10ms # when you get a", "== 1: click1 = 0 click2 = 0 elif click2", "def on_press2(event): right_click() def update_split(): global stage text_str = cur_stages[stage][0]", "log_dir = Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\") # log_fil = log_dir / data2[\"current_section\"] #", "timer_file: # print(timer_file) data = json.load(timer_file) for item in data[\"stats-change\"]:", "False return # print(metronome_time) # print(metronome_interval) # print(time.time()*1000) if metronome_time", "# print(latest 
+ \"\\nTime: \" + run_time) last_amount = amount", "window.text = tk.StringVar() window.text2 = tk.StringVar() window.text3 = tk.StringVar() window.text4", "world_base_time = 0 def get_time(): global last_amount global old_version global", "def on_increment_counter(event): increment_counter() def clicked3(event): sys.exit(1) def clicked2(event): right_click() def", "on_press2(event): right_click() def update_split(): global stage text_str = cur_stages[stage][0] if", "import Enum import copy #\"THE BEER-WARE LICENSE\" (Revision 42): #bleach86", "bg.gbind(data2['increment'], on_increment_counter) # greeting.after(0, update_count) if data2['use_splits'] == 'true': split_font_size", "Spawner\", True), (\"Exit Nether\", True), (\"Tower Build Start\", True), (\"Tower", "ind += 1 item = cur_stages[stage] if type(item[0]) == type([]):", "update_count) if data2['use_splits'] == 'true': split_font_size = data2['split_font_size'] split_font =", "True json_file.close() amount2 = float(amount) / 20 run_time = str(datetime.timedelta(seconds=amount2,", "stage = 1 print(\"stop\") return rtc[:-3] else: ig = 0", "'true': if ig == 1: rt = time.time() click1 =", "= rtc[:-3] # print(diff_txt) window.text.set(diff_txt) # print(base) if click2 ==", "system_type == 'Windows': directory = os.path.expanduser(data2['windows_saves']) amount2 = 0 last_amount", "clicked3) # bg.bind(data2['start_metronome'], start_metronome) ''' this works for the window", "from enum import Enum import copy #\"THE BEER-WARE LICENSE\" (Revision", "update_split() def on_increment_counter(event): increment_counter() def clicked3(event): sys.exit(1) def clicked2(event): right_click()", "font=split_font, textvariable=window.text4) greeting4.pack() # bg.gbind(data2['cycle'], cycle) # bg.gbind(data2['split'], split) #", "# print(metronome_running) # arm_metronome = False def run_metronome(): global metronome_time", "# def read(): # def write(): # class Actions(Enum): #", "in txt: left_click() if \"start_timer\" in txt: 
right_click() def update_time2():", "or metronome_running: return metronome_armed = True # x = threading.Thread(target=listen_for_right_click,", "# print(amount2) run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) # print(run_time) if last_amount", "= 'mct_config.json' with open(json_file) as json_file: data2 = json.load(json_file) if", "0 else: ind = 0 update_split() def split(event): global stage,", "# print(run_time) if last_amount == amount: ig = 0 return", "(\"Enter Stronghold\", True), (\"Enter End\", True), (\"Finish\", True) ] cur_stages", "from playsound import playsound from enum import Enum import copy", "print(amount) latest = max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime) #", "# global base # write_to_log(str(amount2-base)) # base = amount2 def", "if last_amount == amount: ig = 0 return run_time[:-3] else:", "def check_input(): txt = input_fil.read_text() input_fil.write_text(\"\") global metronome_armed # print(txt)", "data2['auto_start'] == 'true': if ig == 1: rt = time.time()", "Stronghold\", True), (\"Enter End\", True), (\"Finish\", True) ] cur_stages =", "arm_metronome = False def run_metronome(): global metronome_time global metronome_interval global", "import sys import platform import json import glob import datetime", "world_base_time = amount # print(\"world base time now {}\".format(world_base_time)) #", "to attempts # def read(): # def write(): # class", "and button == mouse.Button.right: start_metronome(None) return False # mouse.Listener.stop(listener) #", "if data2['auto_start'] == 'true': if ig == 1: rt =", "def clicked3(event): sys.exit(1) def clicked2(event): right_click() def clicked(event): left_click() def", "global metronome_armed time_count += 1 update_time() if metronome_armed or time_count", "print(run_time) if last_amount == amount: ig = 0 return run_time[:-3]", "\"Other\" ], False), ([ \"0-15\", \"15-30\", \"30-45\", \"45-60\", \"60-75\", \"75+\"", "0 rt = 
float(time.time()) - float(amount2) rtc = str(datetime.timedelta(seconds=rt)) stage", "cur_stages[stage][0] if type(text_str) == type([]): text_str = text_str[ind] window.text4.set(text_str) def", "def listen_for_right_click(): def on_click(x, y, button, pressed): # print(button) if", "convert actions to attempts # def read(): # def write():", "0: click2 = 1 click1 = 1 # print(float(amount2)) #", "click2 = 1 else: click1 = 0 click2 = 0", "int(data2['metronome_beats']) listener = None metronome_time = 0 base_update = int(data2['base_update'])", "rt = float(time.time()) - float(amount2) rtc = str(datetime.timedelta(seconds=rt)) stage =", "print(amount) amount2 = float(amount - world_base_time) / 20 # print(amount2)", "threading import tkinter as tk from pynput import mouse from", "rta_font = (font_name, rta_font_size, font_modifiers) igt_font = (font_name, igt_font_size, font_modifiers)", "self.attempts = [] # # convert actions to attempts #", "= True) def play_up_beep(): play_file_named(\"MetronomeHit.mp3\") def play_normal_beep(): play_file_named(\"MetronomeBase.mp3\") def play_metronome_preset():", "base # write_to_log(str(amount2-base)) # base = amount2 def right_click(): global", "# print(float(amount2)) # print(\"hehe\") global base write_to_log(\"reset {}\".format(str(amount2-base))) base =", "(\"Finish\", True) ] cur_stages = {} json_file = 'mct_config.json' with", "global click1 global click2 global amount2 global old_version global stage", "is worth it, you can buy me a beer in", "input file every 10ms # when you get a \"reset", "% (metronome_interval*4)) == metronome_interval*3: # print(\"up beep\") play_up_beep() # pass", "window.text2.set(get_time()) window.after(1000, update_time2) def update_count(): count_str = str(count) text_str =", "'Windows': directory = os.path.expanduser(data2['windows_saves']) amount2 = 0 last_amount = 0", "class Attempt: stage = 0 ind = 0 time_count =", "0 window = tk.Tk() # bg = BindGlobal(widget=window) window.text =", "20 
== 0: check_input() window.after(rta_update, tick_time) def check_input(): txt =", "global click1 global click2 global count global did_change count =", "int(data2['base_update']) rta_update = int(data2['rta_update']) * base_update metronome_bpm = int(data2['metronome_bpm']) metronome_interval", "rta_font_size = data2['rta_font_size'] igt_font_size = data2['igt_font_size'] font_modifiers = data2['font_modifiers'] rta_font", "window.geometry(data2['window_pos']) window.mainloop() def update_time(): global rt global program_time # do_metronome_action()", "if data2['enable_metronome'] == 'true': start_metronome(None) if \"arm_metronome\" in txt: metronome_armed", "print(metronome_running) # arm_metronome = False def run_metronome(): global metronome_time global", "base_time do_metronome_action() end_time = round(time.time()*1000) - base_time elapsed = end_time", "* 60 / metronome_bpm)*10 time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0) # print(metronome_interval)555 while metronome_running: start_time", "listener.join() ''' Sound playing code ''' def play_file_named(str_name): playsound((running_path /", "global ig global did_change # print(\"-------------------------\") if data2['1.7+'] == 'false':", "left_click() if \"start_timer\" in txt: right_click() def update_time2(): window.text2.set(get_time()) window.after(1000,", "global ind, stage, cur_stages ind = 0 stage = 0", "= os.path.expanduser(data2['mac_saves']) elif system_type == 'Windows': directory = os.path.expanduser(data2['windows_saves']) amount2", "window.text4.set(text_str) def reset_split(): global ind, stage, cur_stages ind = 0", "every 10ms # when you get a \"reset timer\" message,", "Path(directory).parent stats_dir = mc_dir / \"stats\" os.chdir(stats_dir) json_file = glob.glob('*.dat')", "def tick_time(): global time_count global metronome_armed time_count += 1 update_time()", "''' this works for the window detecting right click '''", "ig = 0 return run_time[:-3] except: ig = 1 
return", "{} {}\".format(start_time, end_time, )) metronome_time += metronome_interval def do_metronome_action(): global", "# bg.gbind(data2['arm_metronome'], arm_metronome) # bg.gbind(data2['start_metronome'], start_metronome) # bg.gbind(data2['exit'], clicked3) #", "'0:00:00.000' def window2(): font_name = data2['font_name'] rta_font_size = data2['rta_font_size'] igt_font_size", "= rt2 - rt rtc = str(datetime.timedelta(seconds=real_time)) return rtc[:-3] def", "0 click2 = 0 elif click2 == 0: click2 =", "# CREATE_WORLD = 0 # START = 1 # class", "self.actions = [] # self.attempts = [] # # convert", "if ig == 1: rt = time.time() click1 = 1", "window.title(\"MCtimer\") window.attributes('-topmost', True) window.overrideredirect(data2['borderless']) window.geometry(data2['window_pos']) window.mainloop() def update_time(): global rt", "if item[1]: if type(item[0]) == type([]): item[0].remove(item[0][ind]) if len(item[0]) ==", "json.load(json_file) try: amount = data['stats']['minecraft:custom']['minecraft:play_one_minute'] except: amount = data['stat.playOneMinute'] old_version", "True), (\"Enter Stronghold\", True), (\"Enter End\", True), (\"Finish\", True) ]", "font_modifiers) greeting = tk.Label(fg=data2['rta_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text) greeting.pack() if data2['show_igt']", "stage = 0 ind = 0 time_count = 0 rsg", "/ 20 run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) if last_amount == amount:", "def window2(): font_name = data2['font_name'] rta_font_size = data2['rta_font_size'] igt_font_size =", "\"start_timer\" in txt: right_click() def update_time2(): window.text2.set(get_time()) window.after(1000, update_time2) def", "0: ig = 0 rt = float(time.time()) - float(amount2) rtc", "'true': # print(did_change) # print(base) if did_change: rt = float(time.time())", "text_str = text_str[ind] window.text4.set(text_str) def reset_split(): global ind, stage, cur_stages", "want with this stuff. 
#If we meet some day, and", "def read(): # def write(): # class Actions(Enum): # CREATE_WORLD", "= str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) # print(run_time) if last_amount == amount: ig", "= 1 else: click1 = 0 click2 = 0 cur_fil", "0: if (metronome_time % (metronome_interval*4)) == metronome_interval*3: # print(\"up beep\")", "window2(): font_name = data2['font_name'] rta_font_size = data2['rta_font_size'] igt_font_size = data2['igt_font_size']", "stuff. #If we meet some day, and you think this", "update_count(): count_str = str(count) text_str = \"\" for i in", "if data2['auto_start'] == 'true': click1 = 1 click2 = 1", "= False return metronome_time = 0 base_time = round(time.time()*1000) metronome_interval", "global base write_to_log(\"reset {}\".format(str(amount2-base))) base = amount2 def increment_counter(): global", "font=rta_font, textvariable=window.text) greeting.pack() if data2['show_igt'] == 'true': greeting2 = tk.Label(fg=data2['igt_color'],", "you think this stuff is worth it, you can buy", "# listener.start() listener.join() ''' Sound playing code ''' def play_file_named(str_name):", "listen_for_right_click(): def on_click(x, y, button, pressed): # print(button) if pressed:", "ig = 0 rt2 = time.time() real_time = rt2 -", "count = 0 did_change = True if click2 == 1:", "= tk.Label(fg=data2['rta_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text) greeting.pack() if data2['show_igt'] == 'true':", "time.time() real_time = rt2 - rt rtc = str(datetime.timedelta(seconds=real_time)) return", "], True), (\"Enter Nether\", True), (\"Find Fortress\", True), (\"Find Spawner\",", "did_change = False count = 0 ig = 0 base", "Metronome functions ''' def arm_metronome(event): global metronome_armed global metronome_running if", "def left_click(): global click1 if click1 == 1: click1 =", "if \"arm_metronome\" in txt: metronome_armed = True if \"pause_timer\" in", "/ \"input.txt\" # continuously read from input file every 10ms", 
"Created\", True), ([ \"Savannah\", \"Desert\", \"Plains\", \"Other\" ], False), ([", "run_time[:-3] else: did_change = True # print(latest + \"\\nTime: \"", "data2['use_counter'] == 'true': greeting3 = tk.Label(fg=data2['counter_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text3) greeting3.pack()", "0 ind = 0 time_count = 0 rsg = [", "# write_to_log(str(amount2-base)) # base = amount2 def right_click(): global click1", "tk.StringVar() window.text2 = tk.StringVar() window.text3 = tk.StringVar() window.text4 = tk.StringVar()", "global time_count global metronome_armed time_count += 1 update_time() if metronome_armed", "= tk.StringVar() window.text3 = tk.StringVar() window.text4 = tk.StringVar() window.geometry(\"{}x{}\".format(data2[\"width\"], data2[\"height\"]))", "functions ''' def arm_metronome(event): global metronome_armed global metronome_running if metronome_armed", "base = 0 program_time = 0 metronome_armed = False metronome_running", "if metronome_time >= metronome_interval * metronome_beats: metronome_running = False return", "0 base_time = round(time.time()*1000) metronome_interval = int(100 * 60 /", "print(did_change) # print(base) if did_change: rt = float(time.time()) - float(amount2)", "if type(item[0]) == type([]): if ind == len(item[0]): ind =", "run_metronome(): global metronome_time global metronome_interval global metronome_running if data2['has_metronome_preset'] ==", "real_time = rt2 - rt rtc = str(datetime.timedelta(seconds=real_time)) return rtc[:-3]", "global last_amount global old_version global amount2 global ig global did_change", "i in range(0, int(NUM_CHARS/2)): text_str += \" \" text_str +=", "# pass # print(time.time()*1000) # print() def real_time(): global rt", "base time now {}\".format(world_base_time)) # print(amount) amount2 = float(amount -", "print(metronome_time) # print(metronome_interval) # print(time.time()*1000) if metronome_time % metronome_interval ==", "= [] # self.attempts = [] # # convert 
actions", "click1 = 0 elif click1 == 0: click1 = 0", "greeting.after(0, update_time2) window.title(\"MCtimer\") window.attributes('-topmost', True) window.overrideredirect(data2['borderless']) window.geometry(data2['window_pos']) window.mainloop() def update_time():", "= 0 did_change = True if click2 == 1: click1", "Metronome mouse listener ''' def exit_handler(): global listener mouse.Listener.stop(listener) window.quit()", "\"input.txt\" # continuously read from input file every 10ms #", "if data2['show_igt'] == 'true': greeting2 = tk.Label(fg=data2['igt_color'], bg=data2['bg_color'], font=igt_font, textvariable=window.text2)", "metronome_bpm = int(data2['metronome_bpm']) metronome_interval = 0 if data2['auto_start'] == 'true':", "reset_split(): global ind, stage, cur_stages ind = 0 stage =", "BindGlobal(widget=window) window.text = tk.StringVar() window.text2 = tk.StringVar() window.text3 = tk.StringVar()", "click1 if click1 == 1: click1 = 0 elif click1", "type([]): if ind == len(item[0]): ind = 0 else: ind", "metronome_running: return metronome_armed = True # x = threading.Thread(target=listen_for_right_click, daemon=True)", "greeting.after(0, tick_time) greeting.after(0, update_time2) window.title(\"MCtimer\") window.attributes('-topmost', True) window.overrideredirect(data2['borderless']) window.geometry(data2['window_pos']) window.mainloop()", "click1 = 1 # print(float(amount2)) # print(\"hehe\") global base write_to_log(\"reset", "notice you can do whatever you want with this stuff.", "start_metronome) # bg.gbind(data2['exit'], clicked3) # bg.bind(data2['start_metronome'], start_metronome) ''' this works", "if system_type == 'Linux': directory = os.path.expanduser(data2['linux_saves']) elif system_type ==", "pass else: # print(\"normal beep\") play_normal_beep() # pass # print(time.time()*1000)", "== 1: rt2 = time.time() real_time = rt2 - rt", "if data2['1.7+'] == 'false': try: global cur_fil global world_base_time mc_dir", "play_up_beep(): 
play_file_named(\"MetronomeHit.mp3\") def play_normal_beep(): play_file_named(\"MetronomeBase.mp3\") def play_metronome_preset(): time.sleep(0.06) play_file_named(\"MetronomePreset.mp3\") '''", "except: ig = 1 return '0:00:00.000' else: try: latest =", "read from input file every 10ms # when you get", "textvariable=window.text) greeting.pack() if data2['show_igt'] == 'true': greeting2 = tk.Label(fg=data2['igt_color'], bg=data2['bg_color'],", "\"Feathers\", \"Wool\", \"Gravel\" ], True), (\"Enter Nether\", True), (\"Find Fortress\",", "== True and stage == 0: ig = 0 rt", "def write(): # class Actions(Enum): # CREATE_WORLD = 0 #", "''' Metronome mouse listener ''' def exit_handler(): global listener mouse.Listener.stop(listener)", "# window.bind(data2['start_metronome'], start_metronome) #window.bind(\"<Button-1>\", clicked) #window.bind(\"<Button-3>\", clicked2) greeting.after(0, tick_time) greeting.after(0,", "int(data2['rta_update']) * base_update metronome_bpm = int(data2['metronome_bpm']) metronome_interval = 0 if", "metronome_active = False metronome_beats = int(data2['metronome_beats']) listener = None metronome_time", "\"stats\" os.chdir(stats_dir) json_file = glob.glob('*.dat') stats_file = json_file[0] amount =", "read(): # def write(): # class Actions(Enum): # CREATE_WORLD =", "0 with open(stats_file) as timer_file: # print(timer_file) data = json.load(timer_file)", "0 rt2 = time.time() real_time = rt2 - rt rtc", "= float(amount2) - float(base) # rtc = str(datetime.timedelta(seconds=rt)) return rtc[:-3]", "print(base) if click2 == 0: rt = time.time() window.text.set(\"0:00:00.000\") #", "if pressed: if pressed and button == mouse.Button.right: start_metronome(None) return", "bg=data2['bg_color'], font=igt_font, textvariable=window.text2) greeting2.pack() if data2['use_counter'] == 'true': greeting3 =", "if data2['allow_offset'] == 'true': rt += base did_change = False", "time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0) # 
print(metronome_interval)555 while metronome_running: start_time = round(time.time()*1000) - base_time", "metronome_running: return # print(metronome_interval) # metronome_time = program_time - metronome_start_time", "latest = max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime) if system_type", "on_press(event): left_click() def on_press2(event): right_click() def update_split(): global stage text_str", "old_version global stage global ig global did_change if data2['auto_adjust'] ==", "click2 == 0: click2 = 1 click1 = 1 #", "else: os.chdir(latest + '\\\\stats\\\\') json_file = glob.glob('*.json') timer = json_file[0]", "mouse.Listener.stop(listener) # print(\"Right Click Detected (pressed)\") with mouse.Listener(on_click=on_click) as listener:", "\"arm_metronome\" in txt: metronome_armed = True if \"pause_timer\" in txt:", "global ind, stage ind += 1 item = cur_stages[stage] if", "click1 = 1 click2 = 1 else: click1 = 0", "metronome_running if data2['has_metronome_preset'] == 'true': play_metronome_preset() metronome_running = False return", "click1 global click2 global amount2 global old_version global stage global", "end_time, )) metronome_time += metronome_interval def do_metronome_action(): global metronome_running global", "copy.deepcopy(rsg) update_split() def cycle(event): global ind, stage ind += 1", "font_name = data2['font_name'] rta_font_size = data2['rta_font_size'] igt_font_size = data2['igt_font_size'] font_modifiers", "cur_stages ind = 0 stage = 0 cur_stages = copy.deepcopy(rsg)", "= 0 cur_stages = copy.deepcopy(rsg) update_split() def cycle(event): global ind,", "== 'Linux': directory = os.path.expanduser(data2['linux_saves']) elif system_type == 'Darwin': directory", "global amount2 global ig global did_change # print(\"-------------------------\") if data2['1.7+']", "= 0 rt2 = time.time() real_time = rt2 - rt", "world_base_time mc_dir = Path(directory).parent stats_dir = mc_dir / \"stats\" os.chdir(stats_dir)", 
"for d in os.listdir(directory)], key=os.path.getmtime) # print(latest) if latest !=", "= 1 stage = 0 reset_split() return '0:00:00.000' elif click1", "# print(\"-------------------------\") if data2['1.7+'] == 'false': try: global cur_fil global", "playsound import playsound from enum import Enum import copy #\"THE", "# print(\"{} {} {}\".format(start_time, end_time, )) metronome_time += metronome_interval def", "mouse.Listener(on_click=on_click) as listener: # listener.start() listener.join() ''' Sound playing code", "True), (\"Finish\", True) ] cur_stages = {} json_file = 'mct_config.json'", "metronome_armed # print(txt) if \"start_metronome\" in txt: print(data2['enable_metronome']) if data2['enable_metronome']", "Build Start\", True), (\"Tower Build Finished\", True), (\"Tower Leave\", True),", "play_normal_beep(): play_file_named(\"MetronomeBase.mp3\") def play_metronome_preset(): time.sleep(0.06) play_file_named(\"MetronomePreset.mp3\") ''' Metronome functions '''", "'mct_config.json' with open(json_file) as json_file: data2 = json.load(json_file) if data2['borderless']", "window.text3 = tk.StringVar() window.text4 = tk.StringVar() window.geometry(\"{}x{}\".format(data2[\"width\"], data2[\"height\"])) window.configure(bg='black') rt", "Fortress\", True), (\"Find Spawner\", True), (\"Exit Spawner\", True), (\"Exit Nether\",", "Actions(Enum): # CREATE_WORLD = 0 # START = 1 #", "left_click() def on_press2(event): right_click() def update_split(): global stage text_str =", "\"\" for i in range(0, int(NUM_CHARS/2)): text_str += \" \"", "greeting4.pack() # bg.gbind(data2['cycle'], cycle) # bg.gbind(data2['split'], split) # bg.gbind(data2['skip'], skip)", "data['stat.playOneMinute'] old_version = True json_file.close() amount2 = float(amount) / 20", "in txt: print(data2['enable_metronome']) if data2['enable_metronome'] == 'true': start_metronome(None) if \"arm_metronome\"", "pressed: if pressed and button == mouse.Button.right: start_metronome(None) return False", 
"click2 = 0 elif click2 == 0: click2 = 1", "update_time2) def update_count(): count_str = str(count) text_str = \"\" for", "len(item[0]) == 0: stage += 1 ind = 0 update_split()", "(pressed)\") with mouse.Listener(on_click=on_click) as listener: # listener.start() listener.join() ''' Sound", "(\"Exit Spawner\", True), (\"Exit Nether\", True), (\"Tower Build Start\", True),", "True), (\"Tower Leave\", True), (\"Enter Stronghold\", True), (\"Enter End\", True),", "def update_split(): global stage text_str = cur_stages[stage][0] if type(text_str) ==", "= 0 reset_split() return '0:00:00.000' elif click1 == 1: if", "# print(did_change) # print(base) if did_change: rt = float(time.time()) -", "except: amount = data['stat.playOneMinute'] old_version = True json_file.close() amount2 =", "check_input(): txt = input_fil.read_text() input_fil.write_text(\"\") global metronome_armed # print(txt) if", "0 elif click1 == 0: click1 = 0 # global", "import atexit import os import sys import platform import json", "= 0 window = tk.Tk() # bg = BindGlobal(widget=window) window.text", "in item: amount = item[\"1100\"] # print(amount) latest = max([os.path.join(directory,d)", "right_click(): global click1 global click2 global count global did_change count", "system_type == 'Linux': directory = os.path.expanduser(data2['linux_saves']) elif system_type == 'Darwin':", "0 elif click2 == 0: click2 = 1 click1 =", "text_str[ind] window.text4.set(text_str) def reset_split(): global ind, stage, cur_stages ind =", "# log_fil.touch() # log_fil = log_fil.open(\"a\") # log_fil.write(str(text)+\"\\n\") def left_click():", "= json.load(timer_file) for item in data[\"stats-change\"]: if \"1100\" in item:", "font=rta_font, textvariable=window.text3) greeting3.pack() # bg.gbind(data2['increment'], on_increment_counter) # greeting.after(0, update_count) if", "data2['use_splits'] == 'true': split_font_size = data2['split_font_size'] split_font = (font_name, split_font_size,", "= 0 elif click1 == 0: click1 = 
0 #", "def cycle(event): global ind, stage ind += 1 item =", "True), (\"Tower Build Finished\", True), (\"Tower Leave\", True), (\"Enter Stronghold\",", "data2['borderless'] else: data2['borderless'] = False running_path = Path.cwd() NUM_CHARS =", "this notice you can do whatever you want with this", "10ms # when you get a \"reset timer\" message, reset", "system_type == \"Linux\" or system_type == \"Darwin\": os.chdir(latest + '/stats/')", "program_time # do_metronome_action() if click1 == 1: window.text.set(real_time()) elif click1", "global amount2 global old_version global stage global ig global did_change", "True if click2 == 1: click1 = 0 click2 =", "round(time.time()*1000) metronome_interval = int(100 * 60 / metronome_bpm)*10 time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0) #", "return # print(metronome_interval) # metronome_time = program_time - metronome_start_time if", "float(time.time()) - float(amount2) rtc = str(datetime.timedelta(seconds=rt)) stage = 1 print(\"stop\")", "# print(\"normal beep\") play_normal_beep() # pass # print(time.time()*1000) # print()", "print(metronome_interval)555 while metronome_running: start_time = round(time.time()*1000) - base_time do_metronome_action() end_time", "base_time = round(time.time()*1000) metronome_interval = int(100 * 60 / metronome_bpm)*10", "ind = 0 update_split() return stage += 1 ind =", "split_font_size = data2['split_font_size'] split_font = (font_name, split_font_size, font_modifiers) greeting4 =", "# rt = time.time() diff = amount2 - base rtc", "= 0 return run_time[:-3] else: did_change = True print(latest +", "= 0 base = 0 program_time = 0 metronome_armed =", "split_font = (font_name, split_font_size, font_modifiers) greeting4 = tk.Label(fg=data2['split_color'], bg=data2['bg_color'], font=split_font,", "window.after(int(data2['rta_update'])/10, update_time) def tick_time(): global time_count global metronome_armed time_count +=", "i in range(0, int(NUM_CHARS/2)): text_str += \" \" 
window.text3.set(text_str) window.after(rta_update,", "- world_base_time) / 20 # print(amount2) run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5))", "base_update metronome_bpm = int(data2['metronome_bpm']) metronome_interval = 0 if data2['auto_start'] ==", "pynput import mouse from pathlib import Path from playsound import", "\" + run_time) last_amount = amount ig = 0 return", "False), ([ \"Iron\", \"Logs\", \"Feathers\", \"Wool\", \"Gravel\" ], True), (\"Enter", "for i in range(0, int(NUM_CHARS/2)): text_str += \" \" window.text3.set(text_str)", "float(amount2) if data2['allow_offset'] == 'true': rt += base did_change =", "pass # print(time.time()*1000) # print() def real_time(): global rt global", "= 0 update_split() return stage += 1 ind = 0", "def clicked(event): left_click() def write_to_log(text): pass # log_dir = Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\")", "1: click1 = 0 click2 = 0 elif click2 ==", "== 0: click1 = 0 # global base # write_to_log(str(amount2-base))", "def update_split() def on_press(event): left_click() def on_press2(event): right_click() def update_split():", "import os import sys import platform import json import glob", "'true': play_metronome_preset() metronome_running = False return metronome_time = 0 base_time", "= 1 return '0:00:00.000' else: try: latest = max([os.path.join(directory,d) for", "0: # rt = time.time() diff = amount2 - base", "= (font_name, rta_font_size, font_modifiers) igt_font = (font_name, igt_font_size, font_modifiers) greeting", "rtc[:-3] else: ig = 0 rt2 = time.time() real_time =", "igt_font_size, font_modifiers) greeting = tk.Label(fg=data2['rta_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text) greeting.pack() if", "= data2['igt_font_size'] font_modifiers = data2['font_modifiers'] rta_font = (font_name, rta_font_size, font_modifiers)", "data2['borderless'] == 'true': data2['borderless'] else: data2['borderless'] = False running_path =", "return run_time[:-3] else: 
did_change = True print(latest + \"\\nTime: \"", "\" \" text_str += count_str for i in range(0, int(NUM_CHARS/2)):", "print(\"normal beep\") play_normal_beep() # pass # print(time.time()*1000) # print() def", "0 # START = 1 # class Attempt: stage =", "type([]): text_str = text_str[ind] window.text4.set(text_str) def reset_split(): global ind, stage,", "def play_file_named(str_name): playsound((running_path / str_name).as_posix(), block = True) def play_up_beep():", "beep\") play_up_beep() # pass else: # print(\"normal beep\") play_normal_beep() #", "platform import json import glob import datetime import time import", "False # mouse.Listener.stop(listener) # print(\"Right Click Detected (pressed)\") with mouse.Listener(on_click=on_click)", "0 reset_split() return '0:00:00.000' elif click1 == 1: if old_version", "= 1 # print(float(amount2)) # print(\"hehe\") global base write_to_log(\"reset {}\".format(str(amount2-base)))", "0: check_input() window.after(rta_update, tick_time) def check_input(): txt = input_fil.read_text() input_fil.write_text(\"\")", "def split(event): global stage, ind item = cur_stages[stage] if item[1]:", "+= 1 item = cur_stages[stage] if type(item[0]) == type([]): if", "rta_font_size, font_modifiers) igt_font = (font_name, igt_font_size, font_modifiers) greeting = tk.Label(fg=data2['rta_color'],", "% 20 == 0: check_input() window.after(rta_update, tick_time) def check_input(): txt", "+ run_time) last_amount = amount ig = 0 return run_time[:-3]", "0 if data2['auto_start'] == 'true': click1 = 1 click2 =", "click2 global amount2 global old_version global stage global ig global", "stage stage += 1 update_split() def on_increment_counter(event): increment_counter() def clicked3(event):", "def play_up_beep(): play_file_named(\"MetronomeHit.mp3\") def play_normal_beep(): play_file_named(\"MetronomeBase.mp3\") def play_metronome_preset(): time.sleep(0.06) play_file_named(\"MetronomePreset.mp3\")", "str_name).as_posix(), block = True) def 
play_up_beep(): play_file_named(\"MetronomeHit.mp3\") def play_normal_beep(): play_file_named(\"MetronomeBase.mp3\")", "* metronome_beats: metronome_running = False return # print(metronome_time) # print(metronome_interval)", "ig global did_change if data2['auto_adjust'] == 'true': # print(did_change) #", "bg.gbind(data2['pause'], on_press) # bg.gbind(data2['reset_start'], on_press2) # if data2['enable_metronome'] == 'true':", "= str(datetime.timedelta(seconds=rt)) return rtc[:-3] else: if click1 == 1: rt2", "pass # log_dir = Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\") # log_fil = log_dir /", "mc_dir = Path(directory).parent stats_dir = mc_dir / \"stats\" os.chdir(stats_dir) json_file", "bg.bind(data2['start_metronome'], start_metronome) ''' this works for the window detecting right", "False running_path = Path.cwd() NUM_CHARS = 11 system_type = platform.system()", "# print(\"hehe\") global base write_to_log(\"reset {}\".format(str(amount2-base))) base = amount2 def", "'true': rt += base did_change = False if data2['auto_start'] ==", "= (font_name, igt_font_size, font_modifiers) greeting = tk.Label(fg=data2['rta_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text)", "True print(latest + \"\\nTime: \" + run_time) last_amount = amount", "from pathlib import Path from playsound import playsound from enum", "not metronome_running: return # print(metronome_interval) # metronome_time = program_time -", "did_change count = 0 did_change = True if click2 ==", "day, and you think this stuff is worth it, you", "+= 1 ind = 0 update_split() def skip(event): global stage", "bg=data2['bg_color'], font=rta_font, textvariable=window.text3) greeting3.pack() # bg.gbind(data2['increment'], on_increment_counter) # greeting.after(0, update_count)", "rt global click1 global click2 global amount2 global old_version global", "global metronome_running global metronome_interval if not metronome_running: return # print(metronome_interval)", "a \"reset timer\" 
message, reset the timer # # class", "\"Savannah\", \"Desert\", \"Plains\", \"Other\" ], False), ([ \"0-15\", \"15-30\", \"30-45\",", "CREATE_WORLD = 0 # START = 1 # class Attempt:", "= int(data2['metronome_bpm']) metronome_interval = 0 if data2['auto_start'] == 'true': click1", "As long as you retain this notice you can do", "glob.glob('*.json') timer = json_file[0] with open(timer) as json_file: data =", "return '0:00:00.000' def window2(): font_name = data2['font_name'] rta_font_size = data2['rta_font_size']", "start_metronome(event): run_metronome() # print(metronome_running) # arm_metronome = False def run_metronome():", "os.path.expanduser(data2['windows_saves']) amount2 = 0 last_amount = 0 window = tk.Tk()", "if type(text_str) == type([]): text_str = text_str[ind] window.text4.set(text_str) def reset_split():", "attempts # def read(): # def write(): # class Actions(Enum):", "try: latest = max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime) if", "import datetime import time import threading import tkinter as tk", "tick_time) def check_input(): txt = input_fil.read_text() input_fil.write_text(\"\") global metronome_armed #", "click2 = 1 click1 = 1 # print(float(amount2)) # print(\"hehe\")", "window.text4 = tk.StringVar() window.geometry(\"{}x{}\".format(data2[\"width\"], data2[\"height\"])) window.configure(bg='black') rt = time.time() old_version", "tk.Label(fg=data2['igt_color'], bg=data2['bg_color'], font=igt_font, textvariable=window.text2) greeting2.pack() if data2['use_counter'] == 'true': greeting3", "# print(txt) if \"start_metronome\" in txt: print(data2['enable_metronome']) if data2['enable_metronome'] ==", "== 0: rt = time.time() window.text.set(\"0:00:00.000\") # window.after(int(data2['rta_update'])/10, update_time) def", "rsg = [ (\"World Created\", True), ([ \"Savannah\", \"Desert\", \"Plains\",", "pressed and button == mouse.Button.right: start_metronome(None) return False # mouse.Listener.stop(listener)", 
"return run_time[:-3] else: did_change = True # print(latest + \"\\nTime:", "print(\"Right Click Detected (pressed)\") with mouse.Listener(on_click=on_click) as listener: # listener.start()", "\"Plains\", \"Other\" ], False), ([ \"0-15\", \"15-30\", \"30-45\", \"45-60\", \"60-75\",", "\" window.text3.set(text_str) window.after(rta_update, update_count) # def update_split() def on_press(event): left_click()", "type(item[0]) == type([]): item[0].remove(item[0][ind]) if len(item[0]) == 0: stage +=", "stats_dir = mc_dir / \"stats\" os.chdir(stats_dir) json_file = glob.glob('*.dat') stats_file", "while metronome_running: start_time = round(time.time()*1000) - base_time do_metronome_action() end_time =", "round(time.time()*1000) - base_time do_metronome_action() end_time = round(time.time()*1000) - base_time elapsed", "1 ind = 0 update_split() return stage += 1 ind", "if data2['auto_adjust'] == 'true': # print(did_change) # print(base) if did_change:", "print(latest) if latest != cur_fil: cur_fil = latest world_base_time =", "data2['1.7+'] == 'false': try: global cur_fil global world_base_time mc_dir =", "amount = data['stats']['minecraft:custom']['minecraft:play_one_minute'] except: amount = data['stat.playOneMinute'] old_version = True", "return # print(metronome_time) # print(metronome_interval) # print(time.time()*1000) if metronome_time %", "False did_change = False count = 0 ig = 0", "= 0 stage = 0 cur_stages = copy.deepcopy(rsg) update_split() def", "window.geometry(\"{}x{}\".format(data2[\"width\"], data2[\"height\"])) window.configure(bg='black') rt = time.time() old_version = False did_change", "= round(time.time()*1000) metronome_interval = int(100 * 60 / metronome_bpm)*10 time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0)", "rt global program_time # do_metronome_action() if click1 == 1: window.text.set(real_time())", "rt += base did_change = False if data2['auto_start'] == 'true':", "time_count global metronome_armed time_count += 1 
update_time() if metronome_armed or", "True), (\"Enter Nether\", True), (\"Find Fortress\", True), (\"Find Spawner\", True),", "== amount: ig = 0 return run_time[:-3] else: did_change =", "base_time elapsed = end_time - start_time time.sleep((metronome_interval - elapsed)/1000.0) #", "def real_time(): global rt global click1 global click2 global amount2", "= 0 click2 = 0 elif click2 == 0: click2", "= float(time.time()) - float(amount2) rtc = str(datetime.timedelta(seconds=rt)) stage = 1", "1 click2 = 1 stage = 0 reset_split() return '0:00:00.000'", "timer # # class Category: # def __init__(): # self.actions", "click2 global count global did_change count = 0 did_change =", "update_split() def cycle(event): global ind, stage ind += 1 item", "False), ([ \"0-15\", \"15-30\", \"30-45\", \"45-60\", \"60-75\", \"75+\" ], False),", "for d in os.listdir(directory)], key=os.path.getmtime) if system_type == \"Linux\" or", "data2['show_igt'] == 'true': greeting2 = tk.Label(fg=data2['igt_color'], bg=data2['bg_color'], font=igt_font, textvariable=window.text2) greeting2.pack()", "play_metronome_preset(): time.sleep(0.06) play_file_named(\"MetronomePreset.mp3\") ''' Metronome functions ''' def arm_metronome(event): global", "False def run_metronome(): global metronome_time global metronome_interval global metronome_running if", "print(\"hehe\") global base write_to_log(\"reset {}\".format(str(amount2-base))) base = amount2 def increment_counter():", "# bg.bind(data2['start_metronome'], start_metronome) ''' this works for the window detecting", "([ \"Iron\", \"Logs\", \"Feathers\", \"Wool\", \"Gravel\" ], True), (\"Enter Nether\",", "time.time() old_version = False did_change = False count = 0", "= {} json_file = 'mct_config.json' with open(json_file) as json_file: data2", "file. 
As long as you retain this notice you can", "str(datetime.timedelta(seconds=diff)) diff_txt = rtc[:-3] # print(diff_txt) window.text.set(diff_txt) # print(base) if", "start_metronome(None) if \"arm_metronome\" in txt: metronome_armed = True if \"pause_timer\"", "enum import Enum import copy #\"THE BEER-WARE LICENSE\" (Revision 42):", "= 1 print(\"stop\") return rtc[:-3] else: ig = 0 rt2", "clicked2(event): right_click() def clicked(event): left_click() def write_to_log(text): pass # log_dir", "click2 = 1 stage = 0 reset_split() return '0:00:00.000' elif", "# print(amount) amount2 = float(amount - world_base_time) / 20 #", "did_change = True print(latest + \"\\nTime: \" + run_time) last_amount", "base write_to_log(\"reset {}\".format(str(amount2-base))) base = amount2 def increment_counter(): global count", "end_time = round(time.time()*1000) - base_time elapsed = end_time - start_time", "== 1: rt = time.time() click1 = 1 click2 =", "def play_normal_beep(): play_file_named(\"MetronomeBase.mp3\") def play_metronome_preset(): time.sleep(0.06) play_file_named(\"MetronomePreset.mp3\") ''' Metronome functions", "(\"Find Fortress\", True), (\"Find Spawner\", True), (\"Exit Spawner\", True), (\"Exit", "# print(button) if pressed: if pressed and button == mouse.Button.right:", "arm_metronome(event): global metronome_armed global metronome_running if metronome_armed or metronome_running: return", "''' def arm_metronome(event): global metronome_armed global metronome_running if metronome_armed or", "in txt: right_click() def update_time2(): window.text2.set(get_time()) window.after(1000, update_time2) def update_count():", "global stage global ig global did_change if data2['auto_adjust'] == 'true':", "= amount2 def increment_counter(): global count count += 1 '''", "message, reset the timer # # class Category: # def", "the timer # # class Category: # def __init__(): #", "(\"Tower Build Start\", True), (\"Tower Build Finished\", True), (\"Tower Leave\",", 
"input_fil.read_text() input_fil.write_text(\"\") global metronome_armed # print(txt) if \"start_metronome\" in txt:", "+= 1 ''' METRONOME CODE ''' ''' Metronome mouse listener", "now {}\".format(world_base_time)) # print(amount) amount2 = float(amount - world_base_time) /", "base = amount2 def right_click(): global click1 global click2 global", "- base_time do_metronome_action() end_time = round(time.time()*1000) - base_time elapsed =", "def update_time2(): window.text2.set(get_time()) window.after(1000, update_time2) def update_count(): count_str = str(count)", "item: amount = item[\"1100\"] # print(amount) latest = max([os.path.join(directory,d) for", "count_str = str(count) text_str = \"\" for i in range(0,", "metronome_running if metronome_armed or metronome_running: return metronome_armed = True #", "update_time2(): window.text2.set(get_time()) window.after(1000, update_time2) def update_count(): count_str = str(count) text_str", "increment_counter() def clicked3(event): sys.exit(1) def clicked2(event): right_click() def clicked(event): left_click()", "\"Linux\" or system_type == \"Darwin\": os.chdir(latest + '/stats/') else: os.chdir(latest", "0 ig = 0 base = 0 program_time = 0", "data2['enable_metronome'] == 'true': # bg.gbind(data2['arm_metronome'], arm_metronome) # bg.gbind(data2['start_metronome'], start_metronome) #", "ind, stage ind += 1 item = cur_stages[stage] if type(item[0])", "json import glob import datetime import time import threading import", "stuff is worth it, you can buy me a beer", "False if data2['auto_start'] == 'true': if ig == 1: rt", "clicked2) greeting.after(0, tick_time) greeting.after(0, update_time2) window.title(\"MCtimer\") window.attributes('-topmost', True) window.overrideredirect(data2['borderless']) window.geometry(data2['window_pos'])", "metronome_start_time if metronome_time >= metronome_interval * metronome_beats: metronome_running = False", "program_time = 0 metronome_armed = False metronome_running = False 
metronome_active", "system_type = platform.system() if system_type == 'Linux': directory = os.path.expanduser(data2['linux_saves'])", "- float(base) # rtc = str(datetime.timedelta(seconds=rt)) return rtc[:-3] else: if", "in range(0, int(NUM_CHARS/2)): text_str += \" \" text_str += count_str", "greeting.after(0, update_count) # bg.gbind(data2['pause'], on_press) # bg.gbind(data2['reset_start'], on_press2) # if", "stage = 0 reset_split() return '0:00:00.000' elif click1 == 1:", "global program_time # do_metronome_action() if click1 == 1: window.text.set(real_time()) elif", "def exit_handler(): global listener mouse.Listener.stop(listener) window.quit() atexit.register(exit_handler) def listen_for_right_click(): def", "tick_time) greeting.after(0, update_time2) window.title(\"MCtimer\") window.attributes('-topmost', True) window.overrideredirect(data2['borderless']) window.geometry(data2['window_pos']) window.mainloop() def", "float(amount - world_base_time) / 20 # print(amount2) run_time = str(datetime.timedelta(seconds=amount2,", "= Path(\"/Users/sharpieman20/MCtimer/MCtimer\") / \"input.txt\" # continuously read from input file", "== 0: check_input() window.after(rta_update, tick_time) def check_input(): txt = input_fil.read_text()", "tk.StringVar() window.text4 = tk.StringVar() window.geometry(\"{}x{}\".format(data2[\"width\"], data2[\"height\"])) window.configure(bg='black') rt = time.time()", "play_file_named(str_name): playsound((running_path / str_name).as_posix(), block = True) def play_up_beep(): play_file_named(\"MetronomeHit.mp3\")", "tk.Label(fg=data2['split_color'], bg=data2['bg_color'], font=split_font, textvariable=window.text4) greeting4.pack() # bg.gbind(data2['cycle'], cycle) # bg.gbind(data2['split'],", "data = json.load(timer_file) for item in data[\"stats-change\"]: if \"1100\" in", "/ metronome_bpm)*10 time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0) # print(metronome_interval)555 while metronome_running: start_time = 
round(time.time()*1000)", "rt rtc = str(datetime.timedelta(seconds=real_time)) # rt = float(amount2) - float(base)", "# print(latest) if latest != cur_fil: cur_fil = latest world_base_time", "1 click2 = 1 else: click1 = 0 click2 =", "data2['igt_font_size'] font_modifiers = data2['font_modifiers'] rta_font = (font_name, rta_font_size, font_modifiers) igt_font", "you want with this stuff. #If we meet some day,", "# continuously read from input file every 10ms # when", "exit_handler(): global listener mouse.Listener.stop(listener) window.quit() atexit.register(exit_handler) def listen_for_right_click(): def on_click(x,", "== 'false': try: global cur_fil global world_base_time mc_dir = Path(directory).parent", "== 0: stage += 1 ind = 0 update_split() return", "system_type == 'Darwin': directory = os.path.expanduser(data2['mac_saves']) elif system_type == 'Windows':", "import time import threading import tkinter as tk from pynput", "return rtc[:-3] else: if click1 == 1: rt2 = time.time()", "# greeting.after(0, update_count) if data2['use_splits'] == 'true': split_font_size = data2['split_font_size']", "# window.after(int(data2['rta_update'])/10, update_time) def tick_time(): global time_count global metronome_armed time_count", "= os.path.expanduser(data2['windows_saves']) amount2 = 0 last_amount = 0 window =", "mouse.Listener.stop(listener) window.quit() atexit.register(exit_handler) def listen_for_right_click(): def on_click(x, y, button, pressed):", "(metronome_time % (metronome_interval*4)) == metronome_interval*3: # print(\"up beep\") play_up_beep() #", "textvariable=window.text3) greeting3.pack() # bg.gbind(data2['increment'], on_increment_counter) # greeting.after(0, update_count) if data2['use_splits']", "tk.Label(fg=data2['counter_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text3) greeting3.pack() # bg.gbind(data2['increment'], on_increment_counter) # greeting.after(0,", "# def write(): # class Actions(Enum): # CREATE_WORLD = 0", 
"json_file = glob.glob('*.json') timer = json_file[0] with open(timer) as json_file:", "0 base = 0 program_time = 0 metronome_armed = False", "print(\"armed and ready\") def start_metronome(event): run_metronome() # print(metronome_running) # arm_metronome", "did_change: rt = float(time.time()) - float(amount2) if data2['allow_offset'] == 'true':", "class Category: # def __init__(): # self.actions = [] #", "skip(event): global stage stage += 1 update_split() def on_increment_counter(event): increment_counter()", "global listener mouse.Listener.stop(listener) window.quit() atexit.register(exit_handler) def listen_for_right_click(): def on_click(x, y,", "def update_count(): count_str = str(count) text_str = \"\" for i", "= Path.cwd() NUM_CHARS = 11 system_type = platform.system() if system_type", "= 0 cur_fil = None world_base_time = 0 def get_time():", "count_str for i in range(0, int(NUM_CHARS/2)): text_str += \" \"", "listener ''' def exit_handler(): global listener mouse.Listener.stop(listener) window.quit() atexit.register(exit_handler) def", "# x.start() listen_for_right_click() print(\"armed and ready\") def start_metronome(event): run_metronome() #", "+= base did_change = False if data2['auto_start'] == 'true': if", "global metronome_armed global metronome_running if metronome_armed or metronome_running: return metronome_armed", "return metronome_armed = True # x = threading.Thread(target=listen_for_right_click, daemon=True) #", "type(item[0]) == type([]): if ind == len(item[0]): ind = 0", "# print(\"up beep\") play_up_beep() # pass else: # print(\"normal beep\")", "= 0 elif click2 == 0: click2 = 1 click1", "greeting = tk.Label(fg=data2['rta_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text) greeting.pack() if data2['show_igt'] ==", "0 did_change = True if click2 == 1: click1 =", "''' ''' Metronome mouse listener ''' def exit_handler(): global listener", "# print(time.time()*1000) if metronome_time % metronome_interval == 0: if 
(metronome_time", "1: window.text.set(real_time()) elif click1 == 0: # rt = time.time()", "print(button) if pressed: if pressed and button == mouse.Button.right: start_metronome(None)", "bg.gbind(data2['split'], split) # bg.gbind(data2['skip'], skip) reset_split() # greeting.after(0, update_count) #", "global base # write_to_log(str(amount2-base)) # base = amount2 def right_click():", "it, you can buy me a beer in return input_fil", "split(event): global stage, ind item = cur_stages[stage] if item[1]: if", "sys import platform import json import glob import datetime import", "do_metronome_action(): global metronome_running global metronome_interval if not metronome_running: return #", "input_fil.write_text(\"\") global metronome_armed # print(txt) if \"start_metronome\" in txt: print(data2['enable_metronome'])", "\" text_str += count_str for i in range(0, int(NUM_CHARS/2)): text_str", "42): #bleach86 wrote this file. As long as you retain", "os.path.expanduser(data2['mac_saves']) elif system_type == 'Windows': directory = os.path.expanduser(data2['windows_saves']) amount2 =", "else: did_change = True # print(latest + \"\\nTime: \" +", "run_time[:-3] else: did_change = True print(latest + \"\\nTime: \" +", "cur_stages = {} json_file = 'mct_config.json' with open(json_file) as json_file:", "== type([]): item[0].remove(item[0][ind]) if len(item[0]) == 0: stage += 1", "CODE ''' ''' Metronome mouse listener ''' def exit_handler(): global", "== 'Windows': directory = os.path.expanduser(data2['windows_saves']) amount2 = 0 last_amount =", "playing code ''' def play_file_named(str_name): playsound((running_path / str_name).as_posix(), block =", "elif click1 == 0: click1 = 0 # global base", "playsound from enum import Enum import copy #\"THE BEER-WARE LICENSE\"", "amount2 = float(amount) / 20 run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) if", "= data2['rta_font_size'] igt_font_size = data2['igt_font_size'] font_modifiers = data2['font_modifiers'] 
rta_font =", "did_change = True if click2 == 1: click1 = 0", "directory = os.path.expanduser(data2['windows_saves']) amount2 = 0 last_amount = 0 window", "this stuff is worth it, you can buy me a", "#bleach86 wrote this file. As long as you retain this", "= cur_stages[stage][0] if type(text_str) == type([]): text_str = text_str[ind] window.text4.set(text_str)", "play_file_named(\"MetronomePreset.mp3\") ''' Metronome functions ''' def arm_metronome(event): global metronome_armed global", "mouse.Button.right: start_metronome(None) return False # mouse.Listener.stop(listener) # print(\"Right Click Detected", "# print(amount) latest = max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime)", "right_click() def update_time2(): window.text2.set(get_time()) window.after(1000, update_time2) def update_count(): count_str =", "update_split() def skip(event): global stage stage += 1 update_split() def", "== 'Darwin': directory = os.path.expanduser(data2['mac_saves']) elif system_type == 'Windows': directory", "'0:00:00.000' elif click1 == 1: if old_version == True and", "ig == 1: rt = time.time() click1 = 1 click2", "([ \"0-15\", \"15-30\", \"30-45\", \"45-60\", \"60-75\", \"75+\" ], False), ([", "End\", True), (\"Finish\", True) ] cur_stages = {} json_file =", "== 'true': rt += base did_change = False if data2['auto_start']", "last_amount global old_version global amount2 global ig global did_change #", "+= 1 update_split() def on_increment_counter(event): increment_counter() def clicked3(event): sys.exit(1) def", "global click1 if click1 == 1: click1 = 0 elif", "Leave\", True), (\"Enter Stronghold\", True), (\"Enter End\", True), (\"Finish\", True)", "- start_time time.sleep((metronome_interval - elapsed)/1000.0) # print(\"{} {} {}\".format(start_time, end_time,", "{}\".format(str(amount2-base))) base = amount2 def increment_counter(): global count count +=", "print(txt) if \"start_metronome\" in txt: print(data2['enable_metronome']) if 
data2['enable_metronome'] == 'true':", "as listener: # listener.start() listener.join() ''' Sound playing code '''", "time_count += 1 update_time() if metronome_armed or time_count % 20", "mouse from pathlib import Path from playsound import playsound from", "(\"Exit Nether\", True), (\"Tower Build Start\", True), (\"Tower Build Finished\",", "amount2 - base rtc = str(datetime.timedelta(seconds=diff)) diff_txt = rtc[:-3] #", "real_time = rt2 - rt rtc = str(datetime.timedelta(seconds=real_time)) # rt", "True and stage == 0: ig = 0 rt =", "== 'true': split_font_size = data2['split_font_size'] split_font = (font_name, split_font_size, font_modifiers)", "= 0 update_split() def split(event): global stage, ind item =", "= False def run_metronome(): global metronome_time global metronome_interval global metronome_running", "= 0 metronome_armed = False metronome_running = False metronome_active =", "amount2 def right_click(): global click1 global click2 global count global", "data2['auto_adjust'] == 'true': # print(did_change) # print(base) if did_change: rt", "print(metronome_interval) # print(time.time()*1000) if metronome_time % metronome_interval == 0: if", "0 last_amount = 0 window = tk.Tk() # bg =", "d in os.listdir(directory)], key=os.path.getmtime) if system_type == \"Linux\" or system_type", "greeting.after(0, update_count) if data2['use_splits'] == 'true': split_font_size = data2['split_font_size'] split_font", "base rtc = str(datetime.timedelta(seconds=diff)) diff_txt = rtc[:-3] # print(diff_txt) window.text.set(diff_txt)", "== mouse.Button.right: start_metronome(None) return False # mouse.Listener.stop(listener) # print(\"Right Click", "split_font_size, font_modifiers) greeting4 = tk.Label(fg=data2['split_color'], bg=data2['bg_color'], font=split_font, textvariable=window.text4) greeting4.pack() #", "# print(diff_txt) window.text.set(diff_txt) # print(base) if click2 == 0: rt", "txt: metronome_armed = True if \"pause_timer\" in txt: left_click() if", "elapsed = 
end_time - start_time time.sleep((metronome_interval - elapsed)/1000.0) # print(\"{}", "{}\".format(start_time, end_time, )) metronome_time += metronome_interval def do_metronome_action(): global metronome_running", "left_click() def write_to_log(text): pass # log_dir = Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\") # log_fil", "if data2['use_splits'] == 'true': split_font_size = data2['split_font_size'] split_font = (font_name,", "print(time.time()*1000) if metronome_time % metronome_interval == 0: if (metronome_time %", "check_input() window.after(rta_update, tick_time) def check_input(): txt = input_fil.read_text() input_fil.write_text(\"\") global", "stage, cur_stages ind = 0 stage = 0 cur_stages =", "if system_type == \"Linux\" or system_type == \"Darwin\": os.chdir(latest +", "= True if click2 == 1: click1 = 0 click2", "right_click() def update_split(): global stage text_str = cur_stages[stage][0] if type(text_str)", "START = 1 # class Attempt: stage = 0 ind", "= program_time - metronome_start_time if metronome_time >= metronome_interval * metronome_beats:", "in os.listdir(directory)], key=os.path.getmtime) if system_type == \"Linux\" or system_type ==", "cur_fil = None world_base_time = 0 def get_time(): global last_amount", "import json import glob import datetime import time import threading", "# rt = float(amount2) - float(base) # rtc = str(datetime.timedelta(seconds=rt))", "base = amount2 def increment_counter(): global count count += 1", "play_file_named(\"MetronomeBase.mp3\") def play_metronome_preset(): time.sleep(0.06) play_file_named(\"MetronomePreset.mp3\") ''' Metronome functions ''' def", "# print(metronome_interval) # metronome_time = program_time - metronome_start_time if metronome_time", "key=os.path.getmtime) # print(latest) if latest != cur_fil: cur_fil = latest", "tk.Label(fg=data2['rta_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text) greeting.pack() if data2['show_igt'] == 'true': greeting2", "as json_file: data2 
= json.load(json_file) if data2['borderless'] == 'true': data2['borderless']", "# pass else: # print(\"normal beep\") play_normal_beep() # pass #", "= data['stats']['minecraft:custom']['minecraft:play_one_minute'] except: amount = data['stat.playOneMinute'] old_version = True json_file.close()", "# print() def real_time(): global rt global click1 global click2", "== 'true': data2['borderless'] else: data2['borderless'] = False running_path = Path.cwd()", "ind = 0 stage = 0 cur_stages = copy.deepcopy(rsg) update_split()", "amount = item[\"1100\"] # print(amount) latest = max([os.path.join(directory,d) for d", "None metronome_time = 0 base_update = int(data2['base_update']) rta_update = int(data2['rta_update'])", "wrote this file. As long as you retain this notice", "metronome_interval def do_metronome_action(): global metronome_running global metronome_interval if not metronome_running:", "elif click1 == 0: # rt = time.time() diff =", "cycle(event): global ind, stage ind += 1 item = cur_stages[stage]", "True), (\"Tower Build Start\", True), (\"Tower Build Finished\", True), (\"Tower", "(\"World Created\", True), ([ \"Savannah\", \"Desert\", \"Plains\", \"Other\" ], False),", "\"0-15\", \"15-30\", \"30-45\", \"45-60\", \"60-75\", \"75+\" ], False), ([ \"Iron\",", "rt2 = time.time() real_time = rt2 - rt rtc =", "time import threading import tkinter as tk from pynput import", "rt = time.time() click1 = 1 click2 = 1 stage", "= 0 rt = float(time.time()) - float(amount2) rtc = str(datetime.timedelta(seconds=rt))", "log_fil = log_dir / data2[\"current_section\"] # log_fil.touch() # log_fil =", "rta_update = int(data2['rta_update']) * base_update metronome_bpm = int(data2['metronome_bpm']) metronome_interval =", "if data2['enable_metronome'] == 'true': # bg.gbind(data2['arm_metronome'], arm_metronome) # bg.gbind(data2['start_metronome'], start_metronome)", "play_up_beep() # pass else: # print(\"normal beep\") play_normal_beep() # pass", "ready\") def start_metronome(event): 
run_metronome() # print(metronome_running) # arm_metronome = False", "ind item = cur_stages[stage] if item[1]: if type(item[0]) == type([]):", "if click2 == 0: rt = time.time() window.text.set(\"0:00:00.000\") # window.after(int(data2['rta_update'])/10,", "def clicked2(event): right_click() def clicked(event): left_click() def write_to_log(text): pass #", "Path.cwd() NUM_CHARS = 11 system_type = platform.system() if system_type ==", "platform.system() if system_type == 'Linux': directory = os.path.expanduser(data2['linux_saves']) elif system_type", "= tk.Label(fg=data2['counter_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text3) greeting3.pack() # bg.gbind(data2['increment'], on_increment_counter) #", "metronome_time += metronome_interval def do_metronome_action(): global metronome_running global metronome_interval if", "= 0 with open(stats_file) as timer_file: # print(timer_file) data =", "update_split(): global stage text_str = cur_stages[stage][0] if type(text_str) == type([]):", "item[1]: if type(item[0]) == type([]): item[0].remove(item[0][ind]) if len(item[0]) == 0:", "right click ''' # window.bind(data2['start_metronome'], start_metronome) #window.bind(\"<Button-1>\", clicked) #window.bind(\"<Button-3>\", clicked2)", "try: global cur_fil global world_base_time mc_dir = Path(directory).parent stats_dir =", "txt: print(data2['enable_metronome']) if data2['enable_metronome'] == 'true': start_metronome(None) if \"arm_metronome\" in", "metronome_armed time_count += 1 update_time() if metronome_armed or time_count %", "data2[\"current_section\"] # log_fil.touch() # log_fil = log_fil.open(\"a\") # log_fil.write(str(text)+\"\\n\") def", "stats_file = json_file[0] amount = 0 with open(stats_file) as timer_file:", "update_split() def split(event): global stage, ind item = cur_stages[stage] if", "1 # print(float(amount2)) # print(\"hehe\") global base write_to_log(\"reset {}\".format(str(amount2-base))) base", "in os.listdir(directory)], 
key=os.path.getmtime) # print(latest) if latest != cur_fil: cur_fil", "window.overrideredirect(data2['borderless']) window.geometry(data2['window_pos']) window.mainloop() def update_time(): global rt global program_time #", "== 'true': # print(did_change) # print(base) if did_change: rt =", "20 run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) if last_amount == amount: ig", "(\"Enter End\", True), (\"Finish\", True) ] cur_stages = {} json_file", "listen_for_right_click() print(\"armed and ready\") def start_metronome(event): run_metronome() # print(metronome_running) #", "def increment_counter(): global count count += 1 ''' METRONOME CODE", "== 'true': click1 = 1 click2 = 1 else: click1", "# if data2['enable_metronome'] == 'true': # bg.gbind(data2['arm_metronome'], arm_metronome) # bg.gbind(data2['start_metronome'],", "play_metronome_preset() metronome_running = False return metronome_time = 0 base_time =", "0 rsg = [ (\"World Created\", True), ([ \"Savannah\", \"Desert\",", "textvariable=window.text2) greeting2.pack() if data2['use_counter'] == 'true': greeting3 = tk.Label(fg=data2['counter_color'], bg=data2['bg_color'],", "stage += 1 ind = 0 update_split() return stage +=", "import platform import json import glob import datetime import time", "as tk from pynput import mouse from pathlib import Path", "atexit.register(exit_handler) def listen_for_right_click(): def on_click(x, y, button, pressed): # print(button)", "0: stage += 1 ind = 0 update_split() return stage", "data2['split_font_size'] split_font = (font_name, split_font_size, font_modifiers) greeting4 = tk.Label(fg=data2['split_color'], bg=data2['bg_color'],", "in data[\"stats-change\"]: if \"1100\" in item: amount = item[\"1100\"] #", "this file. 
As long as you retain this notice you", "= 0 def get_time(): global last_amount global old_version global amount2", "= 0 rsg = [ (\"World Created\", True), ([ \"Savannah\",", "% metronome_interval == 0: if (metronome_time % (metronome_interval*4)) == metronome_interval*3:", "int(100 * 60 / metronome_bpm)*10 time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0) # print(metronome_interval)555 while metronome_running:", "= item[\"1100\"] # print(amount) latest = max([os.path.join(directory,d) for d in", "print(time.time()*1000) # print() def real_time(): global rt global click1 global", "def run_metronome(): global metronome_time global metronome_interval global metronome_running if data2['has_metronome_preset']", "= (font_name, split_font_size, font_modifiers) greeting4 = tk.Label(fg=data2['split_color'], bg=data2['bg_color'], font=split_font, textvariable=window.text4)", "on_increment_counter) # greeting.after(0, update_count) if data2['use_splits'] == 'true': split_font_size =", "# print(time.time()*1000) # print() def real_time(): global rt global click1", "'Linux': directory = os.path.expanduser(data2['linux_saves']) elif system_type == 'Darwin': directory =", "import mouse from pathlib import Path from playsound import playsound", "running_path = Path.cwd() NUM_CHARS = 11 system_type = platform.system() if", "bg = BindGlobal(widget=window) window.text = tk.StringVar() window.text2 = tk.StringVar() window.text3", "rt = float(time.time()) - float(amount2) if data2['allow_offset'] == 'true': rt", "greeting.pack() if data2['show_igt'] == 'true': greeting2 = tk.Label(fg=data2['igt_color'], bg=data2['bg_color'], font=igt_font,", "ind = 0 time_count = 0 rsg = [ (\"World", "[] # # convert actions to attempts # def read():", "# mouse.Listener.stop(listener) # print(\"Right Click Detected (pressed)\") with mouse.Listener(on_click=on_click) as", "= time.time() old_version = False did_change = False count =", "start_metronome) ''' this works for the window 
detecting right click", "ind = 0 else: ind = 0 update_split() def split(event):", "font_modifiers = data2['font_modifiers'] rta_font = (font_name, rta_font_size, font_modifiers) igt_font =", "mouse listener ''' def exit_handler(): global listener mouse.Listener.stop(listener) window.quit() atexit.register(exit_handler)", "this stuff. #If we meet some day, and you think", "1 item = cur_stages[stage] if type(item[0]) == type([]): if ind", "0 return run_time[:-3] except: ig = 1 return '0:00:00.000' else:", "\"Desert\", \"Plains\", \"Other\" ], False), ([ \"0-15\", \"15-30\", \"30-45\", \"45-60\",", "update_time2) window.title(\"MCtimer\") window.attributes('-topmost', True) window.overrideredirect(data2['borderless']) window.geometry(data2['window_pos']) window.mainloop() def update_time(): global", "button == mouse.Button.right: start_metronome(None) return False # mouse.Listener.stop(listener) # print(\"Right", "else: did_change = True print(latest + \"\\nTime: \" + run_time)", "# do_metronome_action() if click1 == 1: window.text.set(real_time()) elif click1 ==", "clicked) #window.bind(\"<Button-3>\", clicked2) greeting.after(0, tick_time) greeting.after(0, update_time2) window.title(\"MCtimer\") window.attributes('-topmost', True)", "buy me a beer in return input_fil = Path(\"/Users/sharpieman20/MCtimer/MCtimer\") /", "= False count = 0 ig = 0 base =", "actions to attempts # def read(): # def write(): #", "0 return run_time[:-3] else: did_change = True print(latest + \"\\nTime:", "on_press2) # if data2['enable_metronome'] == 'true': # bg.gbind(data2['arm_metronome'], arm_metronome) #", "bg.gbind(data2['arm_metronome'], arm_metronome) # bg.gbind(data2['start_metronome'], start_metronome) # bg.gbind(data2['exit'], clicked3) # bg.bind(data2['start_metronome'],", "# def update_split() def on_press(event): left_click() def on_press2(event): right_click() def", "import copy #\"THE BEER-WARE LICENSE\" (Revision 42): #bleach86 wrote this", "False metronome_beats = 
int(data2['metronome_beats']) listener = None metronome_time = 0", "do_metronome_action() end_time = round(time.time()*1000) - base_time elapsed = end_time -", "data2[\"height\"])) window.configure(bg='black') rt = time.time() old_version = False did_change =", "metronome_running = False metronome_active = False metronome_beats = int(data2['metronome_beats']) listener", "= False metronome_running = False metronome_active = False metronome_beats =", "= float(time.time()) - float(amount2) if data2['allow_offset'] == 'true': rt +=", "data2['font_modifiers'] rta_font = (font_name, rta_font_size, font_modifiers) igt_font = (font_name, igt_font_size,", "metronome_bpm)*10 time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0) # print(metronome_interval)555 while metronome_running: start_time = round(time.time()*1000) -", "= round(time.time()*1000) - base_time elapsed = end_time - start_time time.sleep((metronome_interval", "reset_split() # greeting.after(0, update_count) # bg.gbind(data2['pause'], on_press) # bg.gbind(data2['reset_start'], on_press2)", "run_time[:-3] except: ig = 1 return '0:00:00.000' def window2(): font_name", "#window.bind(\"<Button-3>\", clicked2) greeting.after(0, tick_time) greeting.after(0, update_time2) window.title(\"MCtimer\") window.attributes('-topmost', True) window.overrideredirect(data2['borderless'])", "elif click2 == 0: click2 = 1 click1 = 1", "ig = 0 rt = float(time.time()) - float(amount2) rtc =", "open(stats_file) as timer_file: # print(timer_file) data = json.load(timer_file) for item", "(Revision 42): #bleach86 wrote this file. 
As long as you", "textvariable=window.text4) greeting4.pack() # bg.gbind(data2['cycle'], cycle) # bg.gbind(data2['split'], split) # bg.gbind(data2['skip'],", "stage += 1 ind = 0 update_split() def skip(event): global", "if click2 == 1: click1 = 0 click2 = 0", "with open(timer) as json_file: data = json.load(json_file) try: amount =", "if latest != cur_fil: cur_fil = latest world_base_time = amount", "import glob import datetime import time import threading import tkinter", "metronome_time = 0 base_time = round(time.time()*1000) metronome_interval = int(100 *", "if click1 == 1: window.text.set(real_time()) elif click1 == 0: #", "last_amount = amount ig = 0 return run_time[:-3] except: ig", "file every 10ms # when you get a \"reset timer\"", "= amount # print(\"world base time now {}\".format(world_base_time)) # print(amount)", "# print(base) if did_change: rt = float(time.time()) - float(amount2) if", "get a \"reset timer\" message, reset the timer # #", "# def __init__(): # self.actions = [] # self.attempts =", "# print(\"Right Click Detected (pressed)\") with mouse.Listener(on_click=on_click) as listener: #", "global count count += 1 ''' METRONOME CODE ''' '''", "\"Darwin\": os.chdir(latest + '/stats/') else: os.chdir(latest + '\\\\stats\\\\') json_file =", "\"Gravel\" ], True), (\"Enter Nether\", True), (\"Find Fortress\", True), (\"Find", "if old_version == True and stage == 0: ig =", "+= 1 ind = 0 update_split() return stage += 1", "count = 0 ig = 0 base = 0 program_time", "with this stuff. 
#If we meet some day, and you", "0 cur_stages = copy.deepcopy(rsg) update_split() def cycle(event): global ind, stage", "bg.gbind(data2['cycle'], cycle) # bg.gbind(data2['split'], split) # bg.gbind(data2['skip'], skip) reset_split() #", "os.chdir(latest + '\\\\stats\\\\') json_file = glob.glob('*.json') timer = json_file[0] with", "y, button, pressed): # print(button) if pressed: if pressed and", "tk.Tk() # bg = BindGlobal(widget=window) window.text = tk.StringVar() window.text2 =", "global ig global did_change if data2['auto_adjust'] == 'true': # print(did_change)", "True), (\"Find Spawner\", True), (\"Exit Spawner\", True), (\"Exit Nether\", True),", "= log_fil.open(\"a\") # log_fil.write(str(text)+\"\\n\") def left_click(): global click1 if click1", "= int(data2['rta_update']) * base_update metronome_bpm = int(data2['metronome_bpm']) metronome_interval = 0", "right_click() def clicked(event): left_click() def write_to_log(text): pass # log_dir =", "str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) # print(run_time) if last_amount == amount: ig =", "elif click1 == 1: if old_version == True and stage", "global stage text_str = cur_stages[stage][0] if type(text_str) == type([]): text_str", "], False), ([ \"0-15\", \"15-30\", \"30-45\", \"45-60\", \"60-75\", \"75+\" ],", "round(time.time()*1000) - base_time elapsed = end_time - start_time time.sleep((metronome_interval -", "atexit import os import sys import platform import json import", "txt = input_fil.read_text() input_fil.write_text(\"\") global metronome_armed # print(txt) if \"start_metronome\"", "+ '/stats/') else: os.chdir(latest + '\\\\stats\\\\') json_file = glob.glob('*.json') timer", "import playsound from enum import Enum import copy #\"THE BEER-WARE", "metronome_beats = int(data2['metronome_beats']) listener = None metronome_time = 0 base_update", "window.mainloop() def update_time(): global rt global program_time # do_metronome_action() if", "print(\"-------------------------\") if 
data2['1.7+'] == 'false': try: global cur_fil global world_base_time", "click1 = 1 click2 = 1 stage = 0 reset_split()", "bg.gbind(data2['start_metronome'], start_metronome) # bg.gbind(data2['exit'], clicked3) # bg.bind(data2['start_metronome'], start_metronome) ''' this", "def skip(event): global stage stage += 1 update_split() def on_increment_counter(event):", "== 1: click1 = 0 elif click1 == 0: click1", "json_file = 'mct_config.json' with open(json_file) as json_file: data2 = json.load(json_file)", "type(text_str) == type([]): text_str = text_str[ind] window.text4.set(text_str) def reset_split(): global", "datetime import time import threading import tkinter as tk from", "tk.StringVar() window.geometry(\"{}x{}\".format(data2[\"width\"], data2[\"height\"])) window.configure(bg='black') rt = time.time() old_version = False", "time.sleep(0.06) play_file_named(\"MetronomePreset.mp3\") ''' Metronome functions ''' def arm_metronome(event): global metronome_armed", "== len(item[0]): ind = 0 else: ind = 0 update_split()", "data2['has_metronome_preset'] == 'true': play_metronome_preset() metronome_running = False return metronome_time =", "\" \" window.text3.set(text_str) window.after(rta_update, update_count) # def update_split() def on_press(event):", "global rt global program_time # do_metronome_action() if click1 == 1:", "else: if click1 == 1: rt2 = time.time() real_time =", "if click1 == 1: click1 = 0 elif click1 ==", "= str(count) text_str = \"\" for i in range(0, int(NUM_CHARS/2)):", "old_version == True and stage == 0: ig = 0", "= 1 click1 = 1 # print(float(amount2)) # print(\"hehe\") global", "1: rt = time.time() click1 = 1 click2 = 1", "= [] # # convert actions to attempts # def", "update_split() return stage += 1 ind = 0 update_split() def", "float(base) # rtc = str(datetime.timedelta(seconds=rt)) return rtc[:-3] else: if click1", "directory = os.path.expanduser(data2['linux_saves']) elif system_type == 'Darwin': directory = 
os.path.expanduser(data2['mac_saves'])", "ind = 0 update_split() def split(event): global stage, ind item", "meet some day, and you think this stuff is worth", "0 return run_time[:-3] except: ig = 1 return '0:00:00.000' def", "'/stats/') else: os.chdir(latest + '\\\\stats\\\\') json_file = glob.glob('*.json') timer =", "Path from playsound import playsound from enum import Enum import", "Category: # def __init__(): # self.actions = [] # self.attempts", "'\\\\stats\\\\') json_file = glob.glob('*.json') timer = json_file[0] with open(timer) as", "1 update_time() if metronome_armed or time_count % 20 == 0:", "if data2['borderless'] == 'true': data2['borderless'] else: data2['borderless'] = False running_path", "text_str = \"\" for i in range(0, int(NUM_CHARS/2)): text_str +=", "Path(\"/Users/sharpieman20/MCtimer/MCtimer\") / \"input.txt\" # continuously read from input file every", "base did_change = False if data2['auto_start'] == 'true': if ig", "else: try: latest = max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime)", "for i in range(0, int(NUM_CHARS/2)): text_str += \" \" text_str", "amount # print(\"world base time now {}\".format(world_base_time)) # print(amount) amount2", "try: amount = data['stats']['minecraft:custom']['minecraft:play_one_minute'] except: amount = data['stat.playOneMinute'] old_version =", "mc_dir / \"stats\" os.chdir(stats_dir) json_file = glob.glob('*.dat') stats_file = json_file[0]", "when you get a \"reset timer\" message, reset the timer", "window.after(1000, update_time2) def update_count(): count_str = str(count) text_str = \"\"", "return stage += 1 ind = 0 update_split() def skip(event):", "0 cur_fil = None world_base_time = 0 def get_time(): global", "time.time() diff = amount2 - base rtc = str(datetime.timedelta(seconds=diff)) diff_txt", "= text_str[ind] window.text4.set(text_str) def reset_split(): global ind, stage, cur_stages ind", "def get_time(): global last_amount global old_version global 
amount2 global ig", "= max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime) if system_type ==", "can do whatever you want with this stuff. #If we", "metronome_running global metronome_interval if not metronome_running: return # print(metronome_interval) #", "def right_click(): global click1 global click2 global count global did_change", "float(amount2) - float(base) # rtc = str(datetime.timedelta(seconds=rt)) return rtc[:-3] else:", "print(float(amount2)) # print(\"hehe\") global base write_to_log(\"reset {}\".format(str(amount2-base))) base = amount2", "click1 = 0 click2 = 0 elif click2 == 0:", "global metronome_running if data2['has_metronome_preset'] == 'true': play_metronome_preset() metronome_running = False", "# print(timer_file) data = json.load(timer_file) for item in data[\"stats-change\"]: if", "update_count) # bg.gbind(data2['pause'], on_press) # bg.gbind(data2['reset_start'], on_press2) # if data2['enable_metronome']", "'true': greeting2 = tk.Label(fg=data2['igt_color'], bg=data2['bg_color'], font=igt_font, textvariable=window.text2) greeting2.pack() if data2['use_counter']", "except: ig = 1 return '0:00:00.000' def window2(): font_name =", "- metronome_start_time if metronome_time >= metronome_interval * metronome_beats: metronome_running =", "Enum import copy #\"THE BEER-WARE LICENSE\" (Revision 42): #bleach86 wrote", "= True # x = threading.Thread(target=listen_for_right_click, daemon=True) # x.start() listen_for_right_click()", "0 update_split() def skip(event): global stage stage += 1 update_split()", "skip) reset_split() # greeting.after(0, update_count) # bg.gbind(data2['pause'], on_press) # bg.gbind(data2['reset_start'],", "def reset_split(): global ind, stage, cur_stages ind = 0 stage", "log_fil.touch() # log_fil = log_fil.open(\"a\") # log_fil.write(str(text)+\"\\n\") def left_click(): global", "== type([]): if ind == len(item[0]): ind = 0 else:", "json_file = glob.glob('*.dat') stats_file = json_file[0] amount 
= 0 with", "= str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) if last_amount == amount: ig = 0", "import threading import tkinter as tk from pynput import mouse", "log_dir / data2[\"current_section\"] # log_fil.touch() # log_fil = log_fil.open(\"a\") #", "font_modifiers) igt_font = (font_name, igt_font_size, font_modifiers) greeting = tk.Label(fg=data2['rta_color'], bg=data2['bg_color'],", "global did_change if data2['auto_adjust'] == 'true': # print(did_change) # print(base)", "11 system_type = platform.system() if system_type == 'Linux': directory =", "= 0 if data2['auto_start'] == 'true': click1 = 1 click2", "# bg.gbind(data2['cycle'], cycle) # bg.gbind(data2['split'], split) # bg.gbind(data2['skip'], skip) reset_split()", "== 0: if (metronome_time % (metronome_interval*4)) == metronome_interval*3: # print(\"up", "= int(data2['base_update']) rta_update = int(data2['rta_update']) * base_update metronome_bpm = int(data2['metronome_bpm'])", "as timer_file: # print(timer_file) data = json.load(timer_file) for item in", "for item in data[\"stats-change\"]: if \"1100\" in item: amount =", "= 1 return '0:00:00.000' def window2(): font_name = data2['font_name'] rta_font_size", "x.start() listen_for_right_click() print(\"armed and ready\") def start_metronome(event): run_metronome() # print(metronome_running)", "and you think this stuff is worth it, you can", "1 else: click1 = 0 click2 = 0 cur_fil =", "# print(base) if click2 == 0: rt = time.time() window.text.set(\"0:00:00.000\")", "== 'true': greeting2 = tk.Label(fg=data2['igt_color'], bg=data2['bg_color'], font=igt_font, textvariable=window.text2) greeting2.pack() if", "milliseconds=0.5)) # print(run_time) if last_amount == amount: ig = 0", "\"15-30\", \"30-45\", \"45-60\", \"60-75\", \"75+\" ], False), ([ \"Iron\", \"Logs\",", "= end_time - start_time time.sleep((metronome_interval - elapsed)/1000.0) # print(\"{} {}", "in return input_fil = Path(\"/Users/sharpieman20/MCtimer/MCtimer\") / \"input.txt\" # 
continuously read", "igt_font = (font_name, igt_font_size, font_modifiers) greeting = tk.Label(fg=data2['rta_color'], bg=data2['bg_color'], font=rta_font,", "LICENSE\" (Revision 42): #bleach86 wrote this file. As long as", "pressed): # print(button) if pressed: if pressed and button ==", "bg.gbind(data2['reset_start'], on_press2) # if data2['enable_metronome'] == 'true': # bg.gbind(data2['arm_metronome'], arm_metronome)", "you get a \"reset timer\" message, reset the timer #", "in range(0, int(NUM_CHARS/2)): text_str += \" \" window.text3.set(text_str) window.after(rta_update, update_count)", "else: ig = 0 rt2 = time.time() real_time = rt2", "amount2 = 0 last_amount = 0 window = tk.Tk() #", "you retain this notice you can do whatever you want", "did_change if data2['auto_adjust'] == 'true': # print(did_change) # print(base) if", "/ str_name).as_posix(), block = True) def play_up_beep(): play_file_named(\"MetronomeHit.mp3\") def play_normal_beep():", "= 0 update_split() def skip(event): global stage stage += 1", "= tk.StringVar() window.geometry(\"{}x{}\".format(data2[\"width\"], data2[\"height\"])) window.configure(bg='black') rt = time.time() old_version =", "ind = 0 update_split() def skip(event): global stage stage +=", "if data2['use_counter'] == 'true': greeting3 = tk.Label(fg=data2['counter_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text3)", "True # print(latest + \"\\nTime: \" + run_time) last_amount =", "old_version = True json_file.close() amount2 = float(amount) / 20 run_time", "# class Actions(Enum): # CREATE_WORLD = 0 # START =", "global did_change count = 0 did_change = True if click2", "= True # print(latest + \"\\nTime: \" + run_time) last_amount", "= int(data2['metronome_beats']) listener = None metronome_time = 0 base_update =", "item = cur_stages[stage] if item[1]: if type(item[0]) == type([]): item[0].remove(item[0][ind])", "return '0:00:00.000' elif click1 == 1: if old_version == True", "sys.exit(1) def clicked2(event): 
right_click() def clicked(event): left_click() def write_to_log(text): pass", "we meet some day, and you think this stuff is", "= 0 return run_time[:-3] except: ig = 1 return '0:00:00.000'", "= glob.glob('*.dat') stats_file = json_file[0] amount = 0 with open(stats_file)", "global metronome_interval if not metronome_running: return # print(metronome_interval) # metronome_time", "if type(item[0]) == type([]): item[0].remove(item[0][ind]) if len(item[0]) == 0: stage", "run_time[:-3] except: ig = 1 return '0:00:00.000' else: try: latest", "Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\") # log_fil = log_dir / data2[\"current_section\"] # log_fil.touch() #", "BEER-WARE LICENSE\" (Revision 42): #bleach86 wrote this file. As long", "or system_type == \"Darwin\": os.chdir(latest + '/stats/') else: os.chdir(latest +", "data2['rta_font_size'] igt_font_size = data2['igt_font_size'] font_modifiers = data2['font_modifiers'] rta_font = (font_name,", "= None world_base_time = 0 def get_time(): global last_amount global", "'true': data2['borderless'] else: data2['borderless'] = False running_path = Path.cwd() NUM_CHARS", "cur_fil global world_base_time mc_dir = Path(directory).parent stats_dir = mc_dir /", "stage = 0 cur_stages = copy.deepcopy(rsg) update_split() def cycle(event): global", "= time.time() real_time = rt2 - rt rtc = str(datetime.timedelta(seconds=real_time))", "x = threading.Thread(target=listen_for_right_click, daemon=True) # x.start() listen_for_right_click() print(\"armed and ready\")", "global stage, ind item = cur_stages[stage] if item[1]: if type(item[0])", "log_fil.write(str(text)+\"\\n\") def left_click(): global click1 if click1 == 1: click1", "arm_metronome) # bg.gbind(data2['start_metronome'], start_metronome) # bg.gbind(data2['exit'], clicked3) # bg.bind(data2['start_metronome'], start_metronome)", "stage == 0: ig = 0 rt = float(time.time()) -", "ig = 1 return '0:00:00.000' def window2(): font_name = data2['font_name']", "= data2['font_modifiers'] 
rta_font = (font_name, rta_font_size, font_modifiers) igt_font = (font_name,", "global metronome_armed # print(txt) if \"start_metronome\" in txt: print(data2['enable_metronome']) if", "+= metronome_interval def do_metronome_action(): global metronome_running global metronome_interval if not", "time now {}\".format(world_base_time)) # print(amount) amount2 = float(amount - world_base_time)", "#\"THE BEER-WARE LICENSE\" (Revision 42): #bleach86 wrote this file. As", "= platform.system() if system_type == 'Linux': directory = os.path.expanduser(data2['linux_saves']) elif", "== 'true': play_metronome_preset() metronome_running = False return metronome_time = 0", "metronome_running = False return metronome_time = 0 base_time = round(time.time()*1000)", "int(NUM_CHARS/2)): text_str += \" \" text_str += count_str for i", "metronome_interval if not metronome_running: return # print(metronome_interval) # metronome_time =", "run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) # print(run_time) if last_amount == amount:", "# log_dir = Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\") # log_fil = log_dir / data2[\"current_section\"]", "] cur_stages = {} json_file = 'mct_config.json' with open(json_file) as", "tick_time(): global time_count global metronome_armed time_count += 1 update_time() if", "= data['stat.playOneMinute'] old_version = True json_file.close() amount2 = float(amount) /", "play_normal_beep() # pass # print(time.time()*1000) # print() def real_time(): global", "metronome_armed = True # x = threading.Thread(target=listen_for_right_click, daemon=True) # x.start()", "Build Finished\", True), (\"Tower Leave\", True), (\"Enter Stronghold\", True), (\"Enter", "Click Detected (pressed)\") with mouse.Listener(on_click=on_click) as listener: # listener.start() listener.join()", "listener = None metronome_time = 0 base_update = int(data2['base_update']) rta_update", "stage ind += 1 item = cur_stages[stage] if type(item[0]) ==", 
"write_to_log(str(amount2-base)) # base = amount2 def right_click(): global click1 global", "print() def real_time(): global rt global click1 global click2 global", "time_count % 20 == 0: check_input() window.after(rta_update, tick_time) def check_input():", "{} json_file = 'mct_config.json' with open(json_file) as json_file: data2 =", "max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime) if system_type == \"Linux\"", "on_increment_counter(event): increment_counter() def clicked3(event): sys.exit(1) def clicked2(event): right_click() def clicked(event):", "if \"1100\" in item: amount = item[\"1100\"] # print(amount) latest", "- rt rtc = str(datetime.timedelta(seconds=real_time)) return rtc[:-3] def main(): window2()", "open(timer) as json_file: data = json.load(json_file) try: amount = data['stats']['minecraft:custom']['minecraft:play_one_minute']", "cycle) # bg.gbind(data2['split'], split) # bg.gbind(data2['skip'], skip) reset_split() # greeting.after(0,", "class Actions(Enum): # CREATE_WORLD = 0 # START = 1", "metronome_interval*3: # print(\"up beep\") play_up_beep() # pass else: # print(\"normal", "did_change = False if data2['auto_start'] == 'true': if ig ==", "window.configure(bg='black') rt = time.time() old_version = False did_change = False", "# when you get a \"reset timer\" message, reset the", "0: rt = time.time() window.text.set(\"0:00:00.000\") # window.after(int(data2['rta_update'])/10, update_time) def tick_time():", "cur_stages[stage] if type(item[0]) == type([]): if ind == len(item[0]): ind", "left_click(): global click1 if click1 == 1: click1 = 0", "== 'true': if ig == 1: rt = time.time() click1", "= 0 base_time = round(time.time()*1000) metronome_interval = int(100 * 60", "metronome_time >= metronome_interval * metronome_beats: metronome_running = False return #", "/ 20 # print(amount2) run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) # print(run_time)", "json_file[0] amount = 0 with 
open(stats_file) as timer_file: # print(timer_file)", "= data2['split_font_size'] split_font = (font_name, split_font_size, font_modifiers) greeting4 = tk.Label(fg=data2['split_color'],", "'false': try: global cur_fil global world_base_time mc_dir = Path(directory).parent stats_dir", "True), (\"Find Fortress\", True), (\"Find Spawner\", True), (\"Exit Spawner\", True),", "start_metronome(None) return False # mouse.Listener.stop(listener) # print(\"Right Click Detected (pressed)\")", "rtc = str(datetime.timedelta(seconds=diff)) diff_txt = rtc[:-3] # print(diff_txt) window.text.set(diff_txt) #", "code ''' def play_file_named(str_name): playsound((running_path / str_name).as_posix(), block = True)", "1 update_split() def on_increment_counter(event): increment_counter() def clicked3(event): sys.exit(1) def clicked2(event):", "threading.Thread(target=listen_for_right_click, daemon=True) # x.start() listen_for_right_click() print(\"armed and ready\") def start_metronome(event):", ">= metronome_interval * metronome_beats: metronome_running = False return # print(metronome_time)", "True), (\"Exit Nether\", True), (\"Tower Build Start\", True), (\"Tower Build", "== 'true': greeting3 = tk.Label(fg=data2['counter_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text3) greeting3.pack() #", "json_file.close() amount2 = float(amount) / 20 run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5))", "print(latest + \"\\nTime: \" + run_time) last_amount = amount ig", "= max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime) # print(latest) if", "False return metronome_time = 0 base_time = round(time.time()*1000) metronome_interval =", "with open(stats_file) as timer_file: # print(timer_file) data = json.load(timer_file) for", "else: click1 = 0 click2 = 0 cur_fil = None", "get_time(): global last_amount global old_version global amount2 global ig global", "''' Metronome functions ''' def arm_metronome(event): global 
metronome_armed global metronome_running", "if did_change: rt = float(time.time()) - float(amount2) if data2['allow_offset'] ==", "long as you retain this notice you can do whatever", "window.after(rta_update, update_count) # def update_split() def on_press(event): left_click() def on_press2(event):", "amount2 = float(amount - world_base_time) / 20 # print(amount2) run_time", "click ''' # window.bind(data2['start_metronome'], start_metronome) #window.bind(\"<Button-1>\", clicked) #window.bind(\"<Button-3>\", clicked2) greeting.after(0,", "data2['borderless'] = False running_path = Path.cwd() NUM_CHARS = 11 system_type", "and ready\") def start_metronome(event): run_metronome() # print(metronome_running) # arm_metronome =", "real_time(): global rt global click1 global click2 global amount2 global", "end_time - start_time time.sleep((metronome_interval - elapsed)/1000.0) # print(\"{} {} {}\".format(start_time,", "data['stats']['minecraft:custom']['minecraft:play_one_minute'] except: amount = data['stat.playOneMinute'] old_version = True json_file.close() amount2", "you can do whatever you want with this stuff. 
#If", "global click2 global amount2 global old_version global stage global ig", "window.text.set(real_time()) elif click1 == 0: # rt = time.time() diff", "'true': click1 = 1 click2 = 1 else: click1 =", "0 stage = 0 cur_stages = copy.deepcopy(rsg) update_split() def cycle(event):", "= json_file[0] with open(timer) as json_file: data = json.load(json_file) try:", "greeting3.pack() # bg.gbind(data2['increment'], on_increment_counter) # greeting.after(0, update_count) if data2['use_splits'] ==", "pathlib import Path from playsound import playsound from enum import", "data2['allow_offset'] == 'true': rt += base did_change = False if", "# convert actions to attempts # def read(): # def", "bg=data2['bg_color'], font=split_font, textvariable=window.text4) greeting4.pack() # bg.gbind(data2['cycle'], cycle) # bg.gbind(data2['split'], split)", "split) # bg.gbind(data2['skip'], skip) reset_split() # greeting.after(0, update_count) # bg.gbind(data2['pause'],", "global stage stage += 1 update_split() def on_increment_counter(event): increment_counter() def", "Detected (pressed)\") with mouse.Listener(on_click=on_click) as listener: # listener.start() listener.join() '''", "\"reset timer\" message, reset the timer # # class Category:", "time.time() real_time = rt2 - rt rtc = str(datetime.timedelta(seconds=real_time)) #", "global old_version global amount2 global ig global did_change # print(\"-------------------------\")", "on_press) # bg.gbind(data2['reset_start'], on_press2) # if data2['enable_metronome'] == 'true': #", "= tk.Label(fg=data2['igt_color'], bg=data2['bg_color'], font=igt_font, textvariable=window.text2) greeting2.pack() if data2['use_counter'] == 'true':", "in txt: metronome_armed = True if \"pause_timer\" in txt: left_click()", "= latest world_base_time = amount # print(\"world base time now", "], False), ([ \"Iron\", \"Logs\", \"Feathers\", \"Wool\", \"Gravel\" ], True),", "a beer in return input_fil = Path(\"/Users/sharpieman20/MCtimer/MCtimer\") / 
\"input.txt\" #", "update_count) # def update_split() def on_press(event): left_click() def on_press2(event): right_click()", "''' # window.bind(data2['start_metronome'], start_metronome) #window.bind(\"<Button-1>\", clicked) #window.bind(\"<Button-3>\", clicked2) greeting.after(0, tick_time)", "global click2 global count global did_change count = 0 did_change", "= 11 system_type = platform.system() if system_type == 'Linux': directory", "run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) if last_amount == amount: ig =", "= 0 # START = 1 # class Attempt: stage", "# print(metronome_time) # print(metronome_interval) # print(time.time()*1000) if metronome_time % metronome_interval", "type([]): item[0].remove(item[0][ind]) if len(item[0]) == 0: stage += 1 ind", "Nether\", True), (\"Find Fortress\", True), (\"Find Spawner\", True), (\"Exit Spawner\",", "= tk.StringVar() window.text4 = tk.StringVar() window.geometry(\"{}x{}\".format(data2[\"width\"], data2[\"height\"])) window.configure(bg='black') rt =", "\"Iron\", \"Logs\", \"Feathers\", \"Wool\", \"Gravel\" ], True), (\"Enter Nether\", True),", "\"Logs\", \"Feathers\", \"Wool\", \"Gravel\" ], True), (\"Enter Nether\", True), (\"Find", "worth it, you can buy me a beer in return", "# log_fil = log_fil.open(\"a\") # log_fil.write(str(text)+\"\\n\") def left_click(): global click1", "\"60-75\", \"75+\" ], False), ([ \"Iron\", \"Logs\", \"Feathers\", \"Wool\", \"Gravel\"", "old_version = False did_change = False count = 0 ig", "1 ind = 0 update_split() def skip(event): global stage stage", "listener mouse.Listener.stop(listener) window.quit() atexit.register(exit_handler) def listen_for_right_click(): def on_click(x, y, button,", "timer = json_file[0] with open(timer) as json_file: data = json.load(json_file)", "= 0 # global base # write_to_log(str(amount2-base)) # base =", "= cur_stages[stage] if type(item[0]) == type([]): if ind == len(item[0]):", "== 1: window.text.set(real_time()) elif click1 == 0: # 
rt =", "= 1 click2 = 1 else: click1 = 0 click2", "program_time - metronome_start_time if metronome_time >= metronome_interval * metronome_beats: metronome_running", "global cur_fil global world_base_time mc_dir = Path(directory).parent stats_dir = mc_dir", "update_time(): global rt global program_time # do_metronome_action() if click1 ==", "__init__(): # self.actions = [] # self.attempts = [] #", "and stage == 0: ig = 0 rt = float(time.time())", "key=os.path.getmtime) if system_type == \"Linux\" or system_type == \"Darwin\": os.chdir(latest", "text_str += \" \" window.text3.set(text_str) window.after(rta_update, update_count) # def update_split()", "# arm_metronome = False def run_metronome(): global metronome_time global metronome_interval", "= False return # print(metronome_time) # print(metronome_interval) # print(time.time()*1000) if", "= 0 return run_time[:-3] else: did_change = True # print(latest", "block = True) def play_up_beep(): play_file_named(\"MetronomeHit.mp3\") def play_normal_beep(): play_file_named(\"MetronomeBase.mp3\") def", "# print(metronome_interval)555 while metronome_running: start_time = round(time.time()*1000) - base_time do_metronome_action()", "'Darwin': directory = os.path.expanduser(data2['mac_saves']) elif system_type == 'Windows': directory =", "d in os.listdir(directory)], key=os.path.getmtime) # print(latest) if latest != cur_fil:", "last_amount = 0 window = tk.Tk() # bg = BindGlobal(widget=window)", "if data2['has_metronome_preset'] == 'true': play_metronome_preset() metronome_running = False return metronome_time", "# print(\"world base time now {}\".format(world_base_time)) # print(amount) amount2 =", "reset_split() return '0:00:00.000' elif click1 == 1: if old_version ==", "rtc = str(datetime.timedelta(seconds=rt)) stage = 1 print(\"stop\") return rtc[:-3] else:", "data2 = json.load(json_file) if data2['borderless'] == 'true': data2['borderless'] else: data2['borderless']", "60 / metronome_bpm)*10 
time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0) # print(metronome_interval)555 while metronome_running: start_time =", "str(datetime.timedelta(seconds=rt)) return rtc[:-3] else: if click1 == 1: rt2 =", "def __init__(): # self.actions = [] # self.attempts = []", "= cur_stages[stage] if item[1]: if type(item[0]) == type([]): item[0].remove(item[0][ind]) if", "= tk.StringVar() window.text2 = tk.StringVar() window.text3 = tk.StringVar() window.text4 =", "ig = 1 return '0:00:00.000' else: try: latest = max([os.path.join(directory,d)", "1 return '0:00:00.000' else: try: latest = max([os.path.join(directory,d) for d", "world_base_time) / 20 # print(amount2) run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) #", "# START = 1 # class Attempt: stage = 0", "def arm_metronome(event): global metronome_armed global metronome_running if metronome_armed or metronome_running:", "data2['auto_start'] == 'true': click1 = 1 click2 = 1 else:", "def update_time(): global rt global program_time # do_metronome_action() if click1", "== \"Linux\" or system_type == \"Darwin\": os.chdir(latest + '/stats/') else:", "os.path.expanduser(data2['linux_saves']) elif system_type == 'Darwin': directory = os.path.expanduser(data2['mac_saves']) elif system_type", "amount = data['stat.playOneMinute'] old_version = True json_file.close() amount2 = float(amount)", "= time.time() click1 = 1 click2 = 1 stage =", "''' Sound playing code ''' def play_file_named(str_name): playsound((running_path / str_name).as_posix(),", "count global did_change count = 0 did_change = True if", "window.text2 = tk.StringVar() window.text3 = tk.StringVar() window.text4 = tk.StringVar() window.geometry(\"{}x{}\".format(data2[\"width\"],", "1 print(\"stop\") return rtc[:-3] else: ig = 0 rt2 =", "clicked(event): left_click() def write_to_log(text): pass # log_dir = Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\") #", "# log_fil = log_dir / data2[\"current_section\"] # log_fil.touch() # 
log_fil", "timer\" message, reset the timer # # class Category: #", "item in data[\"stats-change\"]: if \"1100\" in item: amount = item[\"1100\"]", "click1 = 0 # global base # write_to_log(str(amount2-base)) # base", "count += 1 ''' METRONOME CODE ''' ''' Metronome mouse", "listener: # listener.start() listener.join() ''' Sound playing code ''' def", "item[0].remove(item[0][ind]) if len(item[0]) == 0: stage += 1 ind =", "run_metronome() # print(metronome_running) # arm_metronome = False def run_metronome(): global", "# rtc = str(datetime.timedelta(seconds=rt)) return rtc[:-3] else: if click1 ==", "- float(amount2) rtc = str(datetime.timedelta(seconds=rt)) stage = 1 print(\"stop\") return", "= threading.Thread(target=listen_for_right_click, daemon=True) # x.start() listen_for_right_click() print(\"armed and ready\") def", "can buy me a beer in return input_fil = Path(\"/Users/sharpieman20/MCtimer/MCtimer\")", "= log_dir / data2[\"current_section\"] # log_fil.touch() # log_fil = log_fil.open(\"a\")", "diff_txt = rtc[:-3] # print(diff_txt) window.text.set(diff_txt) # print(base) if click2", "= int(100 * 60 / metronome_bpm)*10 time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0) # print(metronome_interval)555 while", "print(amount2) run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) # print(run_time) if last_amount ==", "last_amount == amount: ig = 0 return run_time[:-3] else: did_change", "ig = 0 return run_time[:-3] else: did_change = True print(latest", "True) window.overrideredirect(data2['borderless']) window.geometry(data2['window_pos']) window.mainloop() def update_time(): global rt global program_time", "print(\"stop\") return rtc[:-3] else: ig = 0 rt2 = time.time()", "= str(datetime.timedelta(seconds=real_time)) # rt = float(amount2) - float(base) # rtc", "stage += 1 update_split() def on_increment_counter(event): increment_counter() def clicked3(event): sys.exit(1)", "if metronome_armed or metronome_running: return 
metronome_armed = True # x", "\"\\nTime: \" + run_time) last_amount = amount ig = 0", "item[\"1100\"] # print(amount) latest = max([os.path.join(directory,d) for d in os.listdir(directory)],", "greeting2.pack() if data2['use_counter'] == 'true': greeting3 = tk.Label(fg=data2['counter_color'], bg=data2['bg_color'], font=rta_font,", "# bg.gbind(data2['pause'], on_press) # bg.gbind(data2['reset_start'], on_press2) # if data2['enable_metronome'] ==", "latest world_base_time = amount # print(\"world base time now {}\".format(world_base_time))", "metronome_time = 0 base_update = int(data2['base_update']) rta_update = int(data2['rta_update']) *", "elapsed)/1000.0) # print(\"{} {} {}\".format(start_time, end_time, )) metronome_time += metronome_interval", "'true': start_metronome(None) if \"arm_metronome\" in txt: metronome_armed = True if", "click1 = 0 click2 = 0 cur_fil = None world_base_time", "+= \" \" window.text3.set(text_str) window.after(rta_update, update_count) # def update_split() def", "reset the timer # # class Category: # def __init__():", "#window.bind(\"<Button-1>\", clicked) #window.bind(\"<Button-3>\", clicked2) greeting.after(0, tick_time) greeting.after(0, update_time2) window.title(\"MCtimer\") window.attributes('-topmost',", "rt rtc = str(datetime.timedelta(seconds=real_time)) return rtc[:-3] def main(): window2() main()", "click1 == 1: click1 = 0 elif click1 == 0:", "json_file: data2 = json.load(json_file) if data2['borderless'] == 'true': data2['borderless'] else:", "(font_name, igt_font_size, font_modifiers) greeting = tk.Label(fg=data2['rta_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text) greeting.pack()", "= False if data2['auto_start'] == 'true': if ig == 1:", "= amount2 - base rtc = str(datetime.timedelta(seconds=diff)) diff_txt = rtc[:-3]", "= \"\" for i in range(0, int(NUM_CHARS/2)): text_str += \"", "log_fil = log_fil.open(\"a\") # log_fil.write(str(text)+\"\\n\") def left_click(): global click1 if", 
"metronome_interval == 0: if (metronome_time % (metronome_interval*4)) == metronome_interval*3: #", "cur_stages[stage] if item[1]: if type(item[0]) == type([]): item[0].remove(item[0][ind]) if len(item[0])", "0 # global base # write_to_log(str(amount2-base)) # base = amount2", "return metronome_time = 0 base_time = round(time.time()*1000) metronome_interval = int(100", "copy #\"THE BEER-WARE LICENSE\" (Revision 42): #bleach86 wrote this file.", "def do_metronome_action(): global metronome_running global metronome_interval if not metronome_running: return", "window.quit() atexit.register(exit_handler) def listen_for_right_click(): def on_click(x, y, button, pressed): #", "def start_metronome(event): run_metronome() # print(metronome_running) # arm_metronome = False def", "= False metronome_beats = int(data2['metronome_beats']) listener = None metronome_time =", "import Path from playsound import playsound from enum import Enum", "# print(metronome_interval) # print(time.time()*1000) if metronome_time % metronome_interval == 0:", "+= 1 update_time() if metronome_armed or time_count % 20 ==", "else: data2['borderless'] = False running_path = Path.cwd() NUM_CHARS = 11", "= 0 click2 = 0 cur_fil = None world_base_time =", "count count += 1 ''' METRONOME CODE ''' ''' Metronome", "Finished\", True), (\"Tower Leave\", True), (\"Enter Stronghold\", True), (\"Enter End\",", "# bg.gbind(data2['exit'], clicked3) # bg.bind(data2['start_metronome'], start_metronome) ''' this works for", "if (metronome_time % (metronome_interval*4)) == metronome_interval*3: # print(\"up beep\") play_up_beep()", "elif system_type == 'Darwin': directory = os.path.expanduser(data2['mac_saves']) elif system_type ==", "metronome_interval global metronome_running if data2['has_metronome_preset'] == 'true': play_metronome_preset() metronome_running =", "system_type == \"Darwin\": os.chdir(latest + '/stats/') else: os.chdir(latest + '\\\\stats\\\\')", "= 0 last_amount = 0 window = tk.Tk() # bg", 
"time.time() window.text.set(\"0:00:00.000\") # window.after(int(data2['rta_update'])/10, update_time) def tick_time(): global time_count global", "window.text.set(diff_txt) # print(base) if click2 == 0: rt = time.time()", "text_str += \" \" text_str += count_str for i in", "rt2 - rt rtc = str(datetime.timedelta(seconds=real_time)) return rtc[:-3] def main():", "global rt global click1 global click2 global amount2 global old_version", "- elapsed)/1000.0) # print(\"{} {} {}\".format(start_time, end_time, )) metronome_time +=", "json.load(timer_file) for item in data[\"stats-change\"]: if \"1100\" in item: amount", "if not metronome_running: return # print(metronome_interval) # metronome_time = program_time", "== 0: click2 = 1 click1 = 1 # print(float(amount2))", "amount2 def increment_counter(): global count count += 1 ''' METRONOME", "me a beer in return input_fil = Path(\"/Users/sharpieman20/MCtimer/MCtimer\") / \"input.txt\"", "== 0: ig = 0 rt = float(time.time()) - float(amount2)", "\"1100\" in item: amount = item[\"1100\"] # print(amount) latest =", "stage global ig global did_change if data2['auto_adjust'] == 'true': #", "0 base_update = int(data2['base_update']) rta_update = int(data2['rta_update']) * base_update metronome_bpm", "time.time() click1 = 1 click2 = 1 stage = 0", "if \"start_timer\" in txt: right_click() def update_time2(): window.text2.set(get_time()) window.after(1000, update_time2)", "= BindGlobal(widget=window) window.text = tk.StringVar() window.text2 = tk.StringVar() window.text3 =", "click1 global click2 global count global did_change count = 0", "click1 == 1: window.text.set(real_time()) elif click1 == 0: # rt", "click1 == 1: if old_version == True and stage ==", "update_time) def tick_time(): global time_count global metronome_armed time_count += 1", "if \"pause_timer\" in txt: left_click() if \"start_timer\" in txt: right_click()", "directory = os.path.expanduser(data2['mac_saves']) elif system_type == 'Windows': directory = 
os.path.expanduser(data2['windows_saves'])", "metronome_interval * metronome_beats: metronome_running = False return # print(metronome_time) #", "bg=data2['bg_color'], font=rta_font, textvariable=window.text) greeting.pack() if data2['show_igt'] == 'true': greeting2 =", "global old_version global stage global ig global did_change if data2['auto_adjust']", "you can buy me a beer in return input_fil =", "os.chdir(stats_dir) json_file = glob.glob('*.dat') stats_file = json_file[0] amount = 0", "click1 == 1: rt2 = time.time() real_time = rt2 -", "print(diff_txt) window.text.set(diff_txt) # print(base) if click2 == 0: rt =", "amount2 global old_version global stage global ig global did_change if", "elif system_type == 'Windows': directory = os.path.expanduser(data2['windows_saves']) amount2 = 0", "str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) if last_amount == amount: ig = 0 return", "print(\"world base time now {}\".format(world_base_time)) # print(amount) amount2 = float(amount", "text_str += count_str for i in range(0, int(NUM_CHARS/2)): text_str +=", "time.sleep((metronome_interval - elapsed)/1000.0) # print(\"{} {} {}\".format(start_time, end_time, )) metronome_time", "return run_time[:-3] except: ig = 1 return '0:00:00.000' def window2():", "False metronome_active = False metronome_beats = int(data2['metronome_beats']) listener = None", "metronome_beats: metronome_running = False return # print(metronome_time) # print(metronome_interval) #", "+= \" \" text_str += count_str for i in range(0,", "playsound((running_path / str_name).as_posix(), block = True) def play_up_beep(): play_file_named(\"MetronomeHit.mp3\") def", "log_fil.open(\"a\") # log_fil.write(str(text)+\"\\n\") def left_click(): global click1 if click1 ==", "click2 == 0: rt = time.time() window.text.set(\"0:00:00.000\") # window.after(int(data2['rta_update'])/10, update_time)", "int(NUM_CHARS/2)): text_str += \" \" window.text3.set(text_str) window.after(rta_update, update_count) # def", 
"(\"Find Spawner\", True), (\"Exit Spawner\", True), (\"Exit Nether\", True), (\"Tower", "== 0: # rt = time.time() diff = amount2 -", "\"Wool\", \"Gravel\" ], True), (\"Enter Nether\", True), (\"Find Fortress\", True),", "= str(datetime.timedelta(seconds=rt)) stage = 1 print(\"stop\") return rtc[:-3] else: ig", "# class Category: # def __init__(): # self.actions = []", "/ data2[\"current_section\"] # log_fil.touch() # log_fil = log_fil.open(\"a\") # log_fil.write(str(text)+\"\\n\")", "metronome_running: start_time = round(time.time()*1000) - base_time do_metronome_action() end_time = round(time.time()*1000)", "glob.glob('*.dat') stats_file = json_file[0] amount = 0 with open(stats_file) as", "metronome_time = program_time - metronome_start_time if metronome_time >= metronome_interval *", "NUM_CHARS = 11 system_type = platform.system() if system_type == 'Linux':", "metronome_armed or metronome_running: return metronome_armed = True # x =", "'0:00:00.000' else: try: latest = max([os.path.join(directory,d) for d in os.listdir(directory)],", "= float(amount - world_base_time) / 20 # print(amount2) run_time =", "milliseconds=0.5)) if last_amount == amount: ig = 0 return run_time[:-3]", "rt = time.time() diff = amount2 - base rtc =", "increment_counter(): global count count += 1 ''' METRONOME CODE '''", "font=igt_font, textvariable=window.text2) greeting2.pack() if data2['use_counter'] == 'true': greeting3 = tk.Label(fg=data2['counter_color'],", "([ \"Savannah\", \"Desert\", \"Plains\", \"Other\" ], False), ([ \"0-15\", \"15-30\",", "time_count = 0 rsg = [ (\"World Created\", True), ([", "tk.StringVar() window.text3 = tk.StringVar() window.text4 = tk.StringVar() window.geometry(\"{}x{}\".format(data2[\"width\"], data2[\"height\"])) window.configure(bg='black')", "# bg.gbind(data2['increment'], on_increment_counter) # greeting.after(0, update_count) if data2['use_splits'] == 'true':", "= 0 ig = 0 base = 0 program_time =", "rt2 - rt rtc = 
str(datetime.timedelta(seconds=real_time)) # rt = float(amount2)", "or time_count % 20 == 0: check_input() window.after(rta_update, tick_time) def", "\"30-45\", \"45-60\", \"60-75\", \"75+\" ], False), ([ \"Iron\", \"Logs\", \"Feathers\",", "'true': # bg.gbind(data2['arm_metronome'], arm_metronome) # bg.gbind(data2['start_metronome'], start_metronome) # bg.gbind(data2['exit'], clicked3)", "metronome_interval = 0 if data2['auto_start'] == 'true': click1 = 1", "window.bind(data2['start_metronome'], start_metronome) #window.bind(\"<Button-1>\", clicked) #window.bind(\"<Button-3>\", clicked2) greeting.after(0, tick_time) greeting.after(0, update_time2)", "- float(amount2) if data2['allow_offset'] == 'true': rt += base did_change", "metronome_armed = True if \"pause_timer\" in txt: left_click() if \"start_timer\"", "rtc[:-3] # print(diff_txt) window.text.set(diff_txt) # print(base) if click2 == 0:", "cur_stages = copy.deepcopy(rsg) update_split() def cycle(event): global ind, stage ind", "= [ (\"World Created\", True), ([ \"Savannah\", \"Desert\", \"Plains\", \"Other\"", "float(time.time()) - float(amount2) if data2['allow_offset'] == 'true': rt += base", "== 'true': # bg.gbind(data2['arm_metronome'], arm_metronome) # bg.gbind(data2['start_metronome'], start_metronome) # bg.gbind(data2['exit'],", "(font_name, split_font_size, font_modifiers) greeting4 = tk.Label(fg=data2['split_color'], bg=data2['bg_color'], font=split_font, textvariable=window.text4) greeting4.pack()", "Nether\", True), (\"Tower Build Start\", True), (\"Tower Build Finished\", True),", "return input_fil = Path(\"/Users/sharpieman20/MCtimer/MCtimer\") / \"input.txt\" # continuously read from", "1 # class Attempt: stage = 0 ind = 0", "False count = 0 ig = 0 base = 0", "amount2 global ig global did_change # print(\"-------------------------\") if data2['1.7+'] ==", "0 click2 = 0 cur_fil = None world_base_time = 0", "# bg.gbind(data2['split'], split) # bg.gbind(data2['skip'], skip) reset_split() # 
greeting.after(0, update_count)", "= input_fil.read_text() input_fil.write_text(\"\") global metronome_armed # print(txt) if \"start_metronome\" in", "def on_press(event): left_click() def on_press2(event): right_click() def update_split(): global stage", "if len(item[0]) == 0: stage += 1 ind = 0", "json_file: data = json.load(json_file) try: amount = data['stats']['minecraft:custom']['minecraft:play_one_minute'] except: amount", "- rt rtc = str(datetime.timedelta(seconds=real_time)) # rt = float(amount2) -", "daemon=True) # x.start() listen_for_right_click() print(\"armed and ready\") def start_metronome(event): run_metronome()", "== \"Darwin\": os.chdir(latest + '/stats/') else: os.chdir(latest + '\\\\stats\\\\') json_file", "= float(amount) / 20 run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) if last_amount", "= 0 time_count = 0 rsg = [ (\"World Created\",", "rtc[:-3] else: if click1 == 1: rt2 = time.time() real_time", "\"75+\" ], False), ([ \"Iron\", \"Logs\", \"Feathers\", \"Wool\", \"Gravel\" ],", "rt = float(amount2) - float(base) # rtc = str(datetime.timedelta(seconds=rt)) return", "amount = 0 with open(stats_file) as timer_file: # print(timer_file) data", "None world_base_time = 0 def get_time(): global last_amount global old_version", "= amount ig = 0 return run_time[:-3] except: ig =", "global world_base_time mc_dir = Path(directory).parent stats_dir = mc_dir / \"stats\"", "global did_change # print(\"-------------------------\") if data2['1.7+'] == 'false': try: global", "this works for the window detecting right click ''' #", "amount: ig = 0 return run_time[:-3] else: did_change = True", "= json_file[0] amount = 0 with open(stats_file) as timer_file: #", "# # convert actions to attempts # def read(): #", "data = json.load(json_file) try: amount = data['stats']['minecraft:custom']['minecraft:play_one_minute'] except: amount =", "= True json_file.close() amount2 = float(amount) / 20 run_time =", "# bg.gbind(data2['skip'], skip) 
reset_split() # greeting.after(0, update_count) # bg.gbind(data2['pause'], on_press)", "window.after(rta_update, tick_time) def check_input(): txt = input_fil.read_text() input_fil.write_text(\"\") global metronome_armed", "= 1 click2 = 1 stage = 0 reset_split() return", "= 0 else: ind = 0 update_split() def split(event): global", "on_click(x, y, button, pressed): # print(button) if pressed: if pressed", "False metronome_running = False metronome_active = False metronome_beats = int(data2['metronome_beats'])", "+= count_str for i in range(0, int(NUM_CHARS/2)): text_str += \"", "listener.start() listener.join() ''' Sound playing code ''' def play_file_named(str_name): playsound((running_path", "= time.time() diff = amount2 - base rtc = str(datetime.timedelta(seconds=diff))", "= glob.glob('*.json') timer = json_file[0] with open(timer) as json_file: data", "os import sys import platform import json import glob import", "return run_time[:-3] except: ig = 1 return '0:00:00.000' else: try:", "= Path(directory).parent stats_dir = mc_dir / \"stats\" os.chdir(stats_dir) json_file =", "start_metronome) #window.bind(\"<Button-1>\", clicked) #window.bind(\"<Button-3>\", clicked2) greeting.after(0, tick_time) greeting.after(0, update_time2) window.title(\"MCtimer\")", "\"start_metronome\" in txt: print(data2['enable_metronome']) if data2['enable_metronome'] == 'true': start_metronome(None) if", "True), (\"Enter End\", True), (\"Finish\", True) ] cur_stages = {}", "= round(time.time()*1000) - base_time do_metronome_action() end_time = round(time.time()*1000) - base_time", "update_split() def on_press(event): left_click() def on_press2(event): right_click() def update_split(): global", "Start\", True), (\"Tower Build Finished\", True), (\"Tower Leave\", True), (\"Enter", "# class Attempt: stage = 0 ind = 0 time_count", "from pynput import mouse from pathlib import Path from playsound", "metronome_time global metronome_interval global metronome_running if 
data2['has_metronome_preset'] == 'true': play_metronome_preset()", "1 ''' METRONOME CODE ''' ''' Metronome mouse listener '''", "\"45-60\", \"60-75\", \"75+\" ], False), ([ \"Iron\", \"Logs\", \"Feathers\", \"Wool\",", "True # x = threading.Thread(target=listen_for_right_click, daemon=True) # x.start() listen_for_right_click() print(\"armed", "os.listdir(directory)], key=os.path.getmtime) # print(latest) if latest != cur_fil: cur_fil =", "latest = max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime) # print(latest)", "bg.gbind(data2['skip'], skip) reset_split() # greeting.after(0, update_count) # bg.gbind(data2['pause'], on_press) #", "Attempt: stage = 0 ind = 0 time_count = 0", "button, pressed): # print(button) if pressed: if pressed and button", "{}\".format(world_base_time)) # print(amount) amount2 = float(amount - world_base_time) / 20", "diff = amount2 - base rtc = str(datetime.timedelta(seconds=diff)) diff_txt =", "stage text_str = cur_stages[stage][0] if type(text_str) == type([]): text_str =", "str(datetime.timedelta(seconds=rt)) stage = 1 print(\"stop\") return rtc[:-3] else: ig =", "ig = 0 return run_time[:-3] else: did_change = True #", "cur_fil = latest world_base_time = amount # print(\"world base time", "print(base) if did_change: rt = float(time.time()) - float(amount2) if data2['allow_offset']", "if pressed and button == mouse.Button.right: start_metronome(None) return False #", "== metronome_interval*3: # print(\"up beep\") play_up_beep() # pass else: #", "= Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\") # log_fil = log_dir / data2[\"current_section\"] # log_fil.touch()", "rtc = str(datetime.timedelta(seconds=real_time)) # rt = float(amount2) - float(base) #", "if \"start_metronome\" in txt: print(data2['enable_metronome']) if data2['enable_metronome'] == 'true': start_metronome(None)", "write(): # class Actions(Enum): # CREATE_WORLD = 0 # START", "== 'true': start_metronome(None) if \"arm_metronome\" in txt: 
metronome_armed = True", "if metronome_time % metronome_interval == 0: if (metronome_time % (metronome_interval*4))", "# self.attempts = [] # # convert actions to attempts", "def play_metronome_preset(): time.sleep(0.06) play_file_named(\"MetronomePreset.mp3\") ''' Metronome functions ''' def arm_metronome(event):", "whatever you want with this stuff. #If we meet some", "play_file_named(\"MetronomeHit.mp3\") def play_normal_beep(): play_file_named(\"MetronomeBase.mp3\") def play_metronome_preset(): time.sleep(0.06) play_file_named(\"MetronomePreset.mp3\") ''' Metronome", "amount ig = 0 return run_time[:-3] except: ig = 1", "metronome_armed global metronome_running if metronome_armed or metronome_running: return metronome_armed =", "txt: left_click() if \"start_timer\" in txt: right_click() def update_time2(): window.text2.set(get_time())", "window detecting right click ''' # window.bind(data2['start_metronome'], start_metronome) #window.bind(\"<Button-1>\", clicked)", "if ind == len(item[0]): ind = 0 else: ind =", "with mouse.Listener(on_click=on_click) as listener: # listener.start() listener.join() ''' Sound playing", "True) def play_up_beep(): play_file_named(\"MetronomeHit.mp3\") def play_normal_beep(): play_file_named(\"MetronomeBase.mp3\") def play_metronome_preset(): time.sleep(0.06)", "== 1: if old_version == True and stage == 0:", "''' METRONOME CODE ''' ''' Metronome mouse listener ''' def", "range(0, int(NUM_CHARS/2)): text_str += \" \" text_str += count_str for", "= None metronome_time = 0 base_update = int(data2['base_update']) rta_update =", "= time.time() window.text.set(\"0:00:00.000\") # window.after(int(data2['rta_update'])/10, update_time) def tick_time(): global time_count", "import tkinter as tk from pynput import mouse from pathlib", "clicked3(event): sys.exit(1) def clicked2(event): right_click() def clicked(event): left_click() def write_to_log(text):", "- base_time elapsed = end_time - start_time time.sleep((metronome_interval - 
elapsed)/1000.0)", "= 0 ind = 0 time_count = 0 rsg =", "greeting2 = tk.Label(fg=data2['igt_color'], bg=data2['bg_color'], font=igt_font, textvariable=window.text2) greeting2.pack() if data2['use_counter'] ==", "as json_file: data = json.load(json_file) try: amount = data['stats']['minecraft:custom']['minecraft:play_one_minute'] except:", "font_modifiers) greeting4 = tk.Label(fg=data2['split_color'], bg=data2['bg_color'], font=split_font, textvariable=window.text4) greeting4.pack() # bg.gbind(data2['cycle'],", "Spawner\", True), (\"Exit Spawner\", True), (\"Exit Nether\", True), (\"Tower Build", "/ \"stats\" os.chdir(stats_dir) json_file = glob.glob('*.dat') stats_file = json_file[0] amount", "if metronome_armed or time_count % 20 == 0: check_input() window.after(rta_update,", "True), (\"Exit Spawner\", True), (\"Exit Nether\", True), (\"Tower Build Start\",", ")) metronome_time += metronome_interval def do_metronome_action(): global metronome_running global metronome_interval", "for the window detecting right click ''' # window.bind(data2['start_metronome'], start_metronome)", "# greeting.after(0, update_count) # bg.gbind(data2['pause'], on_press) # bg.gbind(data2['reset_start'], on_press2) #", "\"pause_timer\" in txt: left_click() if \"start_timer\" in txt: right_click() def", "20 # print(amount2) run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) # print(run_time) if", "metronome_interval = int(100 * 60 / metronome_bpm)*10 time.sleep(float(data2['beat_offset'])*metronome_interval/1000.0) # print(metronome_interval)555", "= tk.Tk() # bg = BindGlobal(widget=window) window.text = tk.StringVar() window.text2", "rt = time.time() window.text.set(\"0:00:00.000\") # window.after(int(data2['rta_update'])/10, update_time) def tick_time(): global", "# self.actions = [] # self.attempts = [] # #", "tkinter as tk from pynput import mouse from pathlib import", "tk from pynput import mouse from pathlib import Path from", "click2 = 0 cur_fil = None 
world_base_time = 0 def", "(metronome_interval*4)) == metronome_interval*3: # print(\"up beep\") play_up_beep() # pass else:", "bg.gbind(data2['exit'], clicked3) # bg.bind(data2['start_metronome'], start_metronome) ''' this works for the", "''' def play_file_named(str_name): playsound((running_path / str_name).as_posix(), block = True) def", "(font_name, rta_font_size, font_modifiers) igt_font = (font_name, igt_font_size, font_modifiers) greeting =", "int(data2['metronome_bpm']) metronome_interval = 0 if data2['auto_start'] == 'true': click1 =", "!= cur_fil: cur_fil = latest world_base_time = amount # print(\"world", "text_str = cur_stages[stage][0] if type(text_str) == type([]): text_str = text_str[ind]", "- base rtc = str(datetime.timedelta(seconds=diff)) diff_txt = rtc[:-3] # print(diff_txt)", "print(\"{} {} {}\".format(start_time, end_time, )) metronome_time += metronome_interval def do_metronome_action():", "ig global did_change # print(\"-------------------------\") if data2['1.7+'] == 'false': try:", "click1 == 0: # rt = time.time() diff = amount2", "# x = threading.Thread(target=listen_for_right_click, daemon=True) # x.start() listen_for_right_click() print(\"armed and", "float(amount2) rtc = str(datetime.timedelta(seconds=rt)) stage = 1 print(\"stop\") return rtc[:-3]", "= True print(latest + \"\\nTime: \" + run_time) last_amount =", "write_to_log(text): pass # log_dir = Path(\"/Users/sharpieman20/MCtimer/MCtimer/logs\") # log_fil = log_dir", "max([os.path.join(directory,d) for d in os.listdir(directory)], key=os.path.getmtime) # print(latest) if latest", "beer in return input_fil = Path(\"/Users/sharpieman20/MCtimer/MCtimer\") / \"input.txt\" # continuously", "= data2['font_name'] rta_font_size = data2['rta_font_size'] igt_font_size = data2['igt_font_size'] font_modifiers =", "greeting3 = tk.Label(fg=data2['counter_color'], bg=data2['bg_color'], font=rta_font, textvariable=window.text3) greeting3.pack() # bg.gbind(data2['increment'], 
on_increment_counter)", "ig = 0 base = 0 program_time = 0 metronome_armed", "= json.load(json_file) if data2['borderless'] == 'true': data2['borderless'] else: data2['borderless'] =", "global count global did_change count = 0 did_change = True", "greeting4 = tk.Label(fg=data2['split_color'], bg=data2['bg_color'], font=split_font, textvariable=window.text4) greeting4.pack() # bg.gbind(data2['cycle'], cycle)", "0 update_split() def split(event): global stage, ind item = cur_stages[stage]", "# bg.gbind(data2['start_metronome'], start_metronome) # bg.gbind(data2['exit'], clicked3) # bg.bind(data2['start_metronome'], start_metronome) '''", "as you retain this notice you can do whatever you", "float(amount) / 20 run_time = str(datetime.timedelta(seconds=amount2, milliseconds=0.5)) if last_amount ==", "txt: right_click() def update_time2(): window.text2.set(get_time()) window.after(1000, update_time2) def update_count(): count_str", "def on_click(x, y, button, pressed): # print(button) if pressed: if", "start_time time.sleep((metronome_interval - elapsed)/1000.0) # print(\"{} {} {}\".format(start_time, end_time, ))", "do whatever you want with this stuff. 
#If we meet", "0: click1 = 0 # global base # write_to_log(str(amount2-base)) #", "rtc = str(datetime.timedelta(seconds=rt)) return rtc[:-3] else: if click1 == 1:", "0 program_time = 0 metronome_armed = False metronome_running = False", "= mc_dir / \"stats\" os.chdir(stats_dir) json_file = glob.glob('*.dat') stats_file =", "old_version global amount2 global ig global did_change # print(\"-------------------------\") if", "window.text.set(\"0:00:00.000\") # window.after(int(data2['rta_update'])/10, update_time) def tick_time(): global time_count global metronome_armed", "= 0 program_time = 0 metronome_armed = False metronome_running =", "metronome_armed or time_count % 20 == 0: check_input() window.after(rta_update, tick_time)", "json.load(json_file) if data2['borderless'] == 'true': data2['borderless'] else: data2['borderless'] = False", "igt_font_size = data2['igt_font_size'] font_modifiers = data2['font_modifiers'] rta_font = (font_name, rta_font_size,", "range(0, int(NUM_CHARS/2)): text_str += \" \" window.text3.set(text_str) window.after(rta_update, update_count) #", "0 return run_time[:-3] else: did_change = True # print(latest +", "global metronome_running if metronome_armed or metronome_running: return metronome_armed = True", "+ \"\\nTime: \" + run_time) last_amount = amount ig =", "'true': split_font_size = data2['split_font_size'] split_font = (font_name, split_font_size, font_modifiers) greeting4", "* base_update metronome_bpm = int(data2['metronome_bpm']) metronome_interval = 0 if data2['auto_start']", "the window detecting right click ''' # window.bind(data2['start_metronome'], start_metronome) #window.bind(\"<Button-1>\",", "(\"Enter Nether\", True), (\"Find Fortress\", True), (\"Find Spawner\", True), (\"Exit", "# bg.gbind(data2['reset_start'], on_press2) # if data2['enable_metronome'] == 'true': # bg.gbind(data2['arm_metronome'],", "(\"Tower Leave\", True), (\"Enter Stronghold\", True), (\"Enter End\", True), (\"Finish\",", "print(\"up beep\") 
play_up_beep() # pass else: # print(\"normal beep\") play_normal_beep()", "else: # print(\"normal beep\") play_normal_beep() # pass # print(time.time()*1000) #", "if click1 == 1: rt2 = time.time() real_time = rt2", "global metronome_time global metronome_interval global metronome_running if data2['has_metronome_preset'] == 'true':", "[ (\"World Created\", True), ([ \"Savannah\", \"Desert\", \"Plains\", \"Other\" ],", "rt = time.time() old_version = False did_change = False count", "= rt2 - rt rtc = str(datetime.timedelta(seconds=real_time)) # rt =", "else: ind = 0 update_split() def split(event): global stage, ind", "= amount2 def right_click(): global click1 global click2 global count", "return rtc[:-3] else: ig = 0 rt2 = time.time() real_time", "glob import datetime import time import threading import tkinter as", "= str(datetime.timedelta(seconds=diff)) diff_txt = rtc[:-3] # print(diff_txt) window.text.set(diff_txt) # print(base)" ]
[ "metadata (in the form of a string) when an offset", "offset commit API allows users to provide additional metadata (in", "topic and partition tuple Keyword Arguments: topic (str): A topic", "The ids of all brokers that contain replicas of the", "topic (str): The topic name of the partition this metadata", "The id of the partition this metadata relates to. leader", "state in the MetadataResponse. Keyword Arguments: topic (str): The topic", "Arguments: topic (str): A topic name partition (int): A partition", "0, 0 means no retries backoff_ms (int): Milliseconds to backoff.", "describing the state in the MetadataResponse. Keyword Arguments: topic (str):", "that is the leader for the partition. replicas (List[int]): The", "allows users to provide additional metadata (in the form of", "the broker that is the leader for the partition. replicas", "partition metadata. \"\"\" PartitionMetadata = namedtuple(\"PartitionMetadata\", [\"topic\", \"partition\", \"leader\", \"replicas\",", "\"leader\", \"replicas\", \"isr\", \"error\"]) \"\"\"The Kafka offset commit API The", "when an offset is committed. This can be useful (for", "ids of all brokers that contain replicas of the partition.", "(int): The Kafka broker port. rack (str): The rack of", "\"state\", \"protocol_type\", \"protocol\", \"members\", \"authorized_operations\"]) \"\"\"Define retry policy for async", "leaderEpoch, metadata) [\"offset\", \"metadata\"]) \"\"\"An offset and timestamp tuple Keyword", "replicas of the partition. isr (List[int]): The ids of all", "`RACK1`, `us-east-1d`. Default: None \"\"\" BrokerMetadata = namedtuple(\"BrokerMetadata\", [\"nodeId\", \"host\",", "\"partition\"]) \"\"\"A Kafka broker metadata used by admin tools. Keyword", "which is used to in rack aware partition assignment for", "hostname. port (int): The Kafka broker port. 
rack (str): The", "\"group\", \"state\", \"protocol_type\", \"protocol\", \"members\", \"authorized_operations\"]) \"\"\"Define retry policy for", "what time the commit was made, etc. Keyword Arguments: offset", "absolute_import from collections import namedtuple \"\"\"A topic and partition tuple", "namedtuple(\"OffsetAndTimestamp\", [\"offset\", \"timestamp\"]) MemberInformation = namedtuple(\"MemberInformation\", [\"member_id\", \"client_id\", \"client_host\", \"member_metadata\",", "(int): The Kafka broker id. host (str): The Kafka broker", "namedtuple(\"MemberInformation\", [\"member_id\", \"client_id\", \"client_host\", \"member_metadata\", \"member_assignment\"]) GroupInformation = namedtuple(\"GroupInformation\", [\"error_code\",", "topic name of the partition this metadata relates to. partition", "timestamp (int): The timestamp associated to the offset \"\"\" OffsetAndTimestamp", "rack (str): The rack of the broker, which is used", "metadata relates to. partition (int): The id of the partition", "for the partition. replicas (List[int]): The ids of all brokers", "all brokers that contain in-sync replicas of the partition. error", "offset \"\"\" OffsetAndTimestamp = namedtuple(\"OffsetAndTimestamp\", [\"offset\", \"timestamp\"]) MemberInformation = namedtuple(\"MemberInformation\",", "timestamp tuple Keyword Arguments: offset (int): An offset timestamp (int):", "this partition metadata. \"\"\" PartitionMetadata = namedtuple(\"PartitionMetadata\", [\"topic\", \"partition\", \"leader\",", "The id of the broker that is the leader for", "replicas (List[int]): The ids of all brokers that contain replicas", "namedtuple(\"TopicPartition\", [\"topic\", \"partition\"]) \"\"\"A Kafka broker metadata used by admin", "partition assignment for fault tolerance. Examples: `RACK1`, `us-east-1d`. 
Default: None", "topic name partition (int): A partition id \"\"\" TopicPartition =", "namedtuple(\"GroupInformation\", [\"error_code\", \"group\", \"state\", \"protocol_type\", \"protocol\", \"members\", \"authorized_operations\"]) \"\"\"Define retry", "(str): The Kafka broker hostname. port (int): The Kafka broker", "commit API The Kafka offset commit API allows users to", "This can be useful (for example) to store information about", "commit, what time the commit was made, etc. Keyword Arguments:", "\"\"\" TopicPartition = namedtuple(\"TopicPartition\", [\"topic\", \"partition\"]) \"\"\"A Kafka broker metadata", "Keyword Arguments: offset (int): The offset to be committed metadata", "# TODO add leaderEpoch: OffsetAndMetadata(offset, leaderEpoch, metadata) [\"offset\", \"metadata\"]) \"\"\"An", "tuple Keyword Arguments: offset (int): An offset timestamp (int): The", "isr (List[int]): The ids of all brokers that contain in-sync", "broker port. rack (str): The rack of the broker, which", "to. partition (int): The id of the partition this metadata", "Limit (int): Number of retries. limit >= 0, 0 means", "\"\"\" from __future__ import absolute_import from collections import namedtuple \"\"\"A", "host (str): The Kafka broker hostname. port (int): The Kafka", "for async producer Keyword Arguments: Limit (int): Number of retries.", "`us-east-1d`. Default: None \"\"\" BrokerMetadata = namedtuple(\"BrokerMetadata\", [\"nodeId\", \"host\", \"port\",", "time the commit was made, etc. Keyword Arguments: offset (int):", "(int): An offset timestamp (int): The timestamp associated to the", "the commit, what time the commit was made, etc. Keyword", "offset (int): The offset to be committed metadata (str): Non-null", "Arguments: Limit (int): Number of retries. limit >= 0, 0", "Other useful structs \"\"\" from __future__ import absolute_import from collections", "leader for the partition. 
replicas (List[int]): The ids of all", "import absolute_import from collections import namedtuple \"\"\"A topic and partition", "\"member_assignment\"]) GroupInformation = namedtuple(\"GroupInformation\", [\"error_code\", \"group\", \"state\", \"protocol_type\", \"protocol\", \"members\",", "is committed. This can be useful (for example) to store", "metadata. \"\"\" PartitionMetadata = namedtuple(\"PartitionMetadata\", [\"topic\", \"partition\", \"leader\", \"replicas\", \"isr\",", "request for this partition metadata. \"\"\" PartitionMetadata = namedtuple(\"PartitionMetadata\", [\"topic\",", "Keyword Arguments: topic (str): The topic name of the partition", "Number of retries. limit >= 0, 0 means no retries", "made, etc. Keyword Arguments: offset (int): The offset to be", "import namedtuple \"\"\"A topic and partition tuple Keyword Arguments: topic", "\"\"\"Define retry policy for async producer Keyword Arguments: Limit (int):", "async producer Keyword Arguments: Limit (int): Number of retries. limit", "Arguments: topic (str): The topic name of the partition this", "means no retries backoff_ms (int): Milliseconds to backoff. retry_on_timeouts: \"\"\"", "add leaderEpoch: OffsetAndMetadata(offset, leaderEpoch, metadata) [\"offset\", \"metadata\"]) \"\"\"An offset and", "to store information about which node made the commit, what", "relates to. leader (int): The id of the broker that", "TODO add leaderEpoch: OffsetAndMetadata(offset, leaderEpoch, metadata) [\"offset\", \"metadata\"]) \"\"\"An offset", "leader (int): The id of the broker that is the", "of all brokers that contain in-sync replicas of the partition.", "\"authorized_operations\"]) \"\"\"Define retry policy for async producer Keyword Arguments: Limit", "of the partition this metadata relates to. partition (int): The", "for fault tolerance. Examples: `RACK1`, `us-east-1d`. Default: None \"\"\" BrokerMetadata", "tools. Keyword Arguments: nodeID (int): The Kafka broker id. 
host", "PartitionMetadata = namedtuple(\"PartitionMetadata\", [\"topic\", \"partition\", \"leader\", \"replicas\", \"isr\", \"error\"]) \"\"\"The", "in-sync replicas of the partition. error (KafkaError): A KafkaError object", "limit >= 0, 0 means no retries backoff_ms (int): Milliseconds", "offset to be committed metadata (str): Non-null metadata \"\"\" OffsetAndMetadata", "aware partition assignment for fault tolerance. Examples: `RACK1`, `us-east-1d`. Default:", "\"\"\" OffsetAndMetadata = namedtuple(\"OffsetAndMetadata\", # TODO add leaderEpoch: OffsetAndMetadata(offset, leaderEpoch,", "(int): Milliseconds to backoff. retry_on_timeouts: \"\"\" RetryOptions = namedtuple(\"RetryOptions\", [\"limit\",", "the state in the MetadataResponse. Keyword Arguments: topic (str): The", "brokers that contain in-sync replicas of the partition. error (KafkaError):", "useful structs \"\"\" from __future__ import absolute_import from collections import", "this metadata relates to. partition (int): The id of the", "partition. replicas (List[int]): The ids of all brokers that contain", "TopicPartition = namedtuple(\"TopicPartition\", [\"topic\", \"partition\"]) \"\"\"A Kafka broker metadata used", "(int): The id of the broker that is the leader", "example) to store information about which node made the commit,", "Kafka offset commit API The Kafka offset commit API allows", "\"client_id\", \"client_host\", \"member_metadata\", \"member_assignment\"]) GroupInformation = namedtuple(\"GroupInformation\", [\"error_code\", \"group\", \"state\",", "partition this metadata relates to. leader (int): The id of", "Keyword Arguments: topic (str): A topic name partition (int): A", "\"\"\" OffsetAndTimestamp = namedtuple(\"OffsetAndTimestamp\", [\"offset\", \"timestamp\"]) MemberInformation = namedtuple(\"MemberInformation\", [\"member_id\",", "\"replicas\", \"isr\", \"error\"]) \"\"\"The Kafka offset commit API The Kafka", "backoff_ms (int): Milliseconds to backoff. 
retry_on_timeouts: \"\"\" RetryOptions = namedtuple(\"RetryOptions\",", "A topic name partition (int): A partition id \"\"\" TopicPartition", "\"host\", \"port\", \"rack\"]) \"\"\"A topic partition metadata describing the state", "Milliseconds to backoff. retry_on_timeouts: \"\"\" RetryOptions = namedtuple(\"RetryOptions\", [\"limit\", \"backoff_ms\",", "about which node made the commit, what time the commit", "the MetadataResponse. Keyword Arguments: topic (str): The topic name of", "node made the commit, what time the commit was made,", "namedtuple(\"OffsetAndMetadata\", # TODO add leaderEpoch: OffsetAndMetadata(offset, leaderEpoch, metadata) [\"offset\", \"metadata\"])", "was made, etc. Keyword Arguments: offset (int): The offset to", "Kafka broker id. host (str): The Kafka broker hostname. port", "of the broker, which is used to in rack aware", "MetadataResponse. Keyword Arguments: topic (str): The topic name of the", "store information about which node made the commit, what time", "= namedtuple(\"GroupInformation\", [\"error_code\", \"group\", \"state\", \"protocol_type\", \"protocol\", \"members\", \"authorized_operations\"]) \"\"\"Define", "producer Keyword Arguments: Limit (int): Number of retries. limit >=", "broker that is the leader for the partition. replicas (List[int]):", "(List[int]): The ids of all brokers that contain in-sync replicas", "None \"\"\" BrokerMetadata = namedtuple(\"BrokerMetadata\", [\"nodeId\", \"host\", \"port\", \"rack\"]) \"\"\"A", "The Kafka offset commit API allows users to provide additional", "the commit was made, etc. Keyword Arguments: offset (int): The", "namedtuple \"\"\"A topic and partition tuple Keyword Arguments: topic (str):", "Keyword Arguments: offset (int): An offset timestamp (int): The timestamp", "[\"error_code\", \"group\", \"state\", \"protocol_type\", \"protocol\", \"members\", \"authorized_operations\"]) \"\"\"Define retry policy", "offset is committed. This can be useful (for example) to", "the partition. 
error (KafkaError): A KafkaError object associated with the", "\"\"\"A Kafka broker metadata used by admin tools. Keyword Arguments:", "OffsetAndMetadata(offset, leaderEpoch, metadata) [\"offset\", \"metadata\"]) \"\"\"An offset and timestamp tuple", "namedtuple(\"BrokerMetadata\", [\"nodeId\", \"host\", \"port\", \"rack\"]) \"\"\"A topic partition metadata describing", "a string) when an offset is committed. This can be", "A KafkaError object associated with the request for this partition", "admin tools. Keyword Arguments: nodeID (int): The Kafka broker id.", "[\"offset\", \"timestamp\"]) MemberInformation = namedtuple(\"MemberInformation\", [\"member_id\", \"client_id\", \"client_host\", \"member_metadata\", \"member_assignment\"])", "offset timestamp (int): The timestamp associated to the offset \"\"\"", "Examples: `RACK1`, `us-east-1d`. Default: None \"\"\" BrokerMetadata = namedtuple(\"BrokerMetadata\", [\"nodeId\",", "The timestamp associated to the offset \"\"\" OffsetAndTimestamp = namedtuple(\"OffsetAndTimestamp\",", "broker hostname. port (int): The Kafka broker port. rack (str):", "\"protocol_type\", \"protocol\", \"members\", \"authorized_operations\"]) \"\"\"Define retry policy for async producer", "and timestamp tuple Keyword Arguments: offset (int): An offset timestamp", "(int): The id of the partition this metadata relates to.", "id of the partition this metadata relates to. leader (int):", "that contain in-sync replicas of the partition. error (KafkaError): A", "brokers that contain replicas of the partition. isr (List[int]): The", "and partition tuple Keyword Arguments: topic (str): A topic name", "0 means no retries backoff_ms (int): Milliseconds to backoff. retry_on_timeouts:", "made the commit, what time the commit was made, etc.", "in the MetadataResponse. 
Keyword Arguments: topic (str): The topic name", "\"timestamp\"]) MemberInformation = namedtuple(\"MemberInformation\", [\"member_id\", \"client_id\", \"client_host\", \"member_metadata\", \"member_assignment\"]) GroupInformation", "be committed metadata (str): Non-null metadata \"\"\" OffsetAndMetadata = namedtuple(\"OffsetAndMetadata\",", "id of the broker that is the leader for the", "rack aware partition assignment for fault tolerance. Examples: `RACK1`, `us-east-1d`.", "\"port\", \"rack\"]) \"\"\"A topic partition metadata describing the state in", "The topic name of the partition this metadata relates to.", "error (KafkaError): A KafkaError object associated with the request for", "metadata) [\"offset\", \"metadata\"]) \"\"\"An offset and timestamp tuple Keyword Arguments:", "[\"topic\", \"partition\", \"leader\", \"replicas\", \"isr\", \"error\"]) \"\"\"The Kafka offset commit", "= namedtuple(\"BrokerMetadata\", [\"nodeId\", \"host\", \"port\", \"rack\"]) \"\"\"A topic partition metadata", "Keyword Arguments: nodeID (int): The Kafka broker id. host (str):", "[\"topic\", \"partition\"]) \"\"\"A Kafka broker metadata used by admin tools.", "retries. limit >= 0, 0 means no retries backoff_ms (int):", "GroupInformation = namedtuple(\"GroupInformation\", [\"error_code\", \"group\", \"state\", \"protocol_type\", \"protocol\", \"members\", \"authorized_operations\"])", "the offset \"\"\" OffsetAndTimestamp = namedtuple(\"OffsetAndTimestamp\", [\"offset\", \"timestamp\"]) MemberInformation =", "\"member_metadata\", \"member_assignment\"]) GroupInformation = namedtuple(\"GroupInformation\", [\"error_code\", \"group\", \"state\", \"protocol_type\", \"protocol\",", "of retries. limit >= 0, 0 means no retries backoff_ms", "(for example) to store information about which node made the", "The offset to be committed metadata (str): Non-null metadata \"\"\"", "the partition this metadata relates to. 
partition (int): The id", "\"members\", \"authorized_operations\"]) \"\"\"Define retry policy for async producer Keyword Arguments:", "assignment for fault tolerance. Examples: `RACK1`, `us-east-1d`. Default: None \"\"\"", "= namedtuple(\"PartitionMetadata\", [\"topic\", \"partition\", \"leader\", \"replicas\", \"isr\", \"error\"]) \"\"\"The Kafka", "[\"member_id\", \"client_id\", \"client_host\", \"member_metadata\", \"member_assignment\"]) GroupInformation = namedtuple(\"GroupInformation\", [\"error_code\", \"group\",", "OffsetAndMetadata = namedtuple(\"OffsetAndMetadata\", # TODO add leaderEpoch: OffsetAndMetadata(offset, leaderEpoch, metadata)", "tuple Keyword Arguments: topic (str): A topic name partition (int):", "metadata used by admin tools. Keyword Arguments: nodeID (int): The", "for this partition metadata. \"\"\" PartitionMetadata = namedtuple(\"PartitionMetadata\", [\"topic\", \"partition\",", "commit was made, etc. Keyword Arguments: offset (int): The offset", "rack of the broker, which is used to in rack", "etc. Keyword Arguments: offset (int): The offset to be committed", "be useful (for example) to store information about which node", "associated to the offset \"\"\" OffsetAndTimestamp = namedtuple(\"OffsetAndTimestamp\", [\"offset\", \"timestamp\"])", "partition id \"\"\" TopicPartition = namedtuple(\"TopicPartition\", [\"topic\", \"partition\"]) \"\"\"A Kafka", "\"\"\"A topic partition metadata describing the state in the MetadataResponse.", "partition (int): The id of the partition this metadata relates", "metadata relates to. leader (int): The id of the broker", "(str): The rack of the broker, which is used to", "A partition id \"\"\" TopicPartition = namedtuple(\"TopicPartition\", [\"topic\", \"partition\"]) \"\"\"A", "\"isr\", \"error\"]) \"\"\"The Kafka offset commit API The Kafka offset", "contain in-sync replicas of the partition. 
error (KafkaError): A KafkaError", "to be committed metadata (str): Non-null metadata \"\"\" OffsetAndMetadata =", "relates to. partition (int): The id of the partition this", "is used to in rack aware partition assignment for fault", "policy for async producer Keyword Arguments: Limit (int): Number of", "associated with the request for this partition metadata. \"\"\" PartitionMetadata", "\"rack\"]) \"\"\"A topic partition metadata describing the state in the", "metadata \"\"\" OffsetAndMetadata = namedtuple(\"OffsetAndMetadata\", # TODO add leaderEpoch: OffsetAndMetadata(offset,", "used by admin tools. Keyword Arguments: nodeID (int): The Kafka", "(str): A topic name partition (int): A partition id \"\"\"", "replicas of the partition. error (KafkaError): A KafkaError object associated", "\"metadata\"]) \"\"\"An offset and timestamp tuple Keyword Arguments: offset (int):", "all brokers that contain replicas of the partition. isr (List[int]):", "in rack aware partition assignment for fault tolerance. Examples: `RACK1`,", "\"\"\" BrokerMetadata = namedtuple(\"BrokerMetadata\", [\"nodeId\", \"host\", \"port\", \"rack\"]) \"\"\"A topic", "__future__ import absolute_import from collections import namedtuple \"\"\"A topic and", "(List[int]): The ids of all brokers that contain replicas of", "= namedtuple(\"MemberInformation\", [\"member_id\", \"client_id\", \"client_host\", \"member_metadata\", \"member_assignment\"]) GroupInformation = namedtuple(\"GroupInformation\",", "partition tuple Keyword Arguments: topic (str): A topic name partition", "string) when an offset is committed. This can be useful", "Kafka broker metadata used by admin tools. Keyword Arguments: nodeID", "retry policy for async producer Keyword Arguments: Limit (int): Number", "the partition this metadata relates to. leader (int): The id", "the request for this partition metadata. 
\"\"\" PartitionMetadata = namedtuple(\"PartitionMetadata\",", "committed metadata (str): Non-null metadata \"\"\" OffsetAndMetadata = namedtuple(\"OffsetAndMetadata\", #", "\"\"\" PartitionMetadata = namedtuple(\"PartitionMetadata\", [\"topic\", \"partition\", \"leader\", \"replicas\", \"isr\", \"error\"])", "name partition (int): A partition id \"\"\" TopicPartition = namedtuple(\"TopicPartition\",", "to backoff. retry_on_timeouts: \"\"\" RetryOptions = namedtuple(\"RetryOptions\", [\"limit\", \"backoff_ms\", \"retry_on_timeouts\"])", "Non-null metadata \"\"\" OffsetAndMetadata = namedtuple(\"OffsetAndMetadata\", # TODO add leaderEpoch:", "to. leader (int): The id of the broker that is", "namedtuple(\"PartitionMetadata\", [\"topic\", \"partition\", \"leader\", \"replicas\", \"isr\", \"error\"]) \"\"\"The Kafka offset", "Kafka offset commit API allows users to provide additional metadata", "form of a string) when an offset is committed. This", "(in the form of a string) when an offset is", "= namedtuple(\"OffsetAndTimestamp\", [\"offset\", \"timestamp\"]) MemberInformation = namedtuple(\"MemberInformation\", [\"member_id\", \"client_id\", \"client_host\",", "\"protocol\", \"members\", \"authorized_operations\"]) \"\"\"Define retry policy for async producer Keyword", "broker metadata used by admin tools. Keyword Arguments: nodeID (int):", "the partition. replicas (List[int]): The ids of all brokers that", "object associated with the request for this partition metadata. \"\"\"", "offset (int): An offset timestamp (int): The timestamp associated to", "Kafka broker port. rack (str): The rack of the broker,", "the broker, which is used to in rack aware partition", "metadata (str): Non-null metadata \"\"\" OffsetAndMetadata = namedtuple(\"OffsetAndMetadata\", # TODO", "Kafka broker hostname. port (int): The Kafka broker port. 
rack", "from __future__ import absolute_import from collections import namedtuple \"\"\"A topic", "= namedtuple(\"TopicPartition\", [\"topic\", \"partition\"]) \"\"\"A Kafka broker metadata used by", "users to provide additional metadata (in the form of a", "committed. This can be useful (for example) to store information", ">= 0, 0 means no retries backoff_ms (int): Milliseconds to", "no retries backoff_ms (int): Milliseconds to backoff. retry_on_timeouts: \"\"\" RetryOptions", "The ids of all brokers that contain in-sync replicas of", "from collections import namedtuple \"\"\"A topic and partition tuple Keyword", "commit API allows users to provide additional metadata (in the", "contain replicas of the partition. isr (List[int]): The ids of", "API allows users to provide additional metadata (in the form", "broker, which is used to in rack aware partition assignment", "Default: None \"\"\" BrokerMetadata = namedtuple(\"BrokerMetadata\", [\"nodeId\", \"host\", \"port\", \"rack\"])", "The rack of the broker, which is used to in", "\"\"\" Other useful structs \"\"\" from __future__ import absolute_import from", "port (int): The Kafka broker port. rack (str): The rack", "An offset timestamp (int): The timestamp associated to the offset", "partition. isr (List[int]): The ids of all brokers that contain", "metadata describing the state in the MetadataResponse. Keyword Arguments: topic", "retries backoff_ms (int): Milliseconds to backoff. retry_on_timeouts: \"\"\" RetryOptions =", "The Kafka broker hostname. port (int): The Kafka broker port.", "\"\"\"An offset and timestamp tuple Keyword Arguments: offset (int): An", "(int): A partition id \"\"\" TopicPartition = namedtuple(\"TopicPartition\", [\"topic\", \"partition\"])", "fault tolerance. Examples: `RACK1`, `us-east-1d`. Default: None \"\"\" BrokerMetadata =", "tolerance. Examples: `RACK1`, `us-east-1d`. 
Default: None \"\"\" BrokerMetadata = namedtuple(\"BrokerMetadata\",", "BrokerMetadata = namedtuple(\"BrokerMetadata\", [\"nodeId\", \"host\", \"port\", \"rack\"]) \"\"\"A topic partition", "structs \"\"\" from __future__ import absolute_import from collections import namedtuple", "collections import namedtuple \"\"\"A topic and partition tuple Keyword Arguments:", "The Kafka broker id. host (str): The Kafka broker hostname.", "the form of a string) when an offset is committed.", "information about which node made the commit, what time the", "is the leader for the partition. replicas (List[int]): The ids", "= namedtuple(\"OffsetAndMetadata\", # TODO add leaderEpoch: OffsetAndMetadata(offset, leaderEpoch, metadata) [\"offset\",", "of the broker that is the leader for the partition.", "of the partition. isr (List[int]): The ids of all brokers", "of a string) when an offset is committed. This can", "timestamp associated to the offset \"\"\" OffsetAndTimestamp = namedtuple(\"OffsetAndTimestamp\", [\"offset\",", "which node made the commit, what time the commit was", "(int): Number of retries. limit >= 0, 0 means no", "Arguments: nodeID (int): The Kafka broker id. host (str): The", "[\"offset\", \"metadata\"]) \"\"\"An offset and timestamp tuple Keyword Arguments: offset", "the partition. isr (List[int]): The ids of all brokers that", "(KafkaError): A KafkaError object associated with the request for this", "to provide additional metadata (in the form of a string)", "port. rack (str): The rack of the broker, which is", "with the request for this partition metadata. \"\"\" PartitionMetadata =", "MemberInformation = namedtuple(\"MemberInformation\", [\"member_id\", \"client_id\", \"client_host\", \"member_metadata\", \"member_assignment\"]) GroupInformation =", "Keyword Arguments: Limit (int): Number of retries. 
limit >= 0,", "[\"nodeId\", \"host\", \"port\", \"rack\"]) \"\"\"A topic partition metadata describing the", "(str): The topic name of the partition this metadata relates", "Arguments: offset (int): An offset timestamp (int): The timestamp associated", "\"partition\", \"leader\", \"replicas\", \"isr\", \"error\"]) \"\"\"The Kafka offset commit API", "provide additional metadata (in the form of a string) when", "id. host (str): The Kafka broker hostname. port (int): The", "nodeID (int): The Kafka broker id. host (str): The Kafka", "OffsetAndTimestamp = namedtuple(\"OffsetAndTimestamp\", [\"offset\", \"timestamp\"]) MemberInformation = namedtuple(\"MemberInformation\", [\"member_id\", \"client_id\",", "id \"\"\" TopicPartition = namedtuple(\"TopicPartition\", [\"topic\", \"partition\"]) \"\"\"A Kafka broker", "KafkaError object associated with the request for this partition metadata.", "of all brokers that contain replicas of the partition. isr", "to the offset \"\"\" OffsetAndTimestamp = namedtuple(\"OffsetAndTimestamp\", [\"offset\", \"timestamp\"]) MemberInformation", "\"\"\"The Kafka offset commit API The Kafka offset commit API", "partition metadata describing the state in the MetadataResponse. Keyword Arguments:", "offset commit API The Kafka offset commit API allows users", "of the partition this metadata relates to. leader (int): The", "(int): The offset to be committed metadata (str): Non-null metadata", "topic partition metadata describing the state in the MetadataResponse. Keyword", "can be useful (for example) to store information about which", "\"\"\"A topic and partition tuple Keyword Arguments: topic (str): A", "the leader for the partition. 
replicas (List[int]): The ids of", "\"error\"]) \"\"\"The Kafka offset commit API The Kafka offset commit", "additional metadata (in the form of a string) when an", "leaderEpoch: OffsetAndMetadata(offset, leaderEpoch, metadata) [\"offset\", \"metadata\"]) \"\"\"An offset and timestamp", "Arguments: offset (int): The offset to be committed metadata (str):", "partition. error (KafkaError): A KafkaError object associated with the request", "an offset is committed. This can be useful (for example)", "partition this metadata relates to. partition (int): The id of", "offset and timestamp tuple Keyword Arguments: offset (int): An offset", "\"client_host\", \"member_metadata\", \"member_assignment\"]) GroupInformation = namedtuple(\"GroupInformation\", [\"error_code\", \"group\", \"state\", \"protocol_type\",", "(str): Non-null metadata \"\"\" OffsetAndMetadata = namedtuple(\"OffsetAndMetadata\", # TODO add", "of the partition. error (KafkaError): A KafkaError object associated with", "API The Kafka offset commit API allows users to provide", "this metadata relates to. leader (int): The id of the", "ids of all brokers that contain in-sync replicas of the", "partition (int): A partition id \"\"\" TopicPartition = namedtuple(\"TopicPartition\", [\"topic\",", "that contain replicas of the partition. isr (List[int]): The ids", "broker id. host (str): The Kafka broker hostname. port (int):", "by admin tools. Keyword Arguments: nodeID (int): The Kafka broker", "useful (for example) to store information about which node made", "to in rack aware partition assignment for fault tolerance. Examples:", "The Kafka broker port. rack (str): The rack of the", "used to in rack aware partition assignment for fault tolerance.", "(int): The timestamp associated to the offset \"\"\" OffsetAndTimestamp =", "name of the partition this metadata relates to. partition (int):", "topic (str): A topic name partition (int): A partition id" ]
[ "blob.counted = True vehicle_count += 1 # log count data", "if args.showdroi: frame = draw_roi(frame, droi) # save frame in", "'yolo' if args.detector == None else args.detector tracker = 'kcf'", "= cv2.VideoWriter('./videos/output.avi', cv2.VideoWriter_fourcc('M','J','P','G'), 30, (f_width, f_height)) log_file_name = 'log.txt' with", "import get_counting_line, is_passed_counting_line # parse CLI arguments parser = argparse.ArgumentParser()", "log count data to a file (vehicle_id, count, datetime) if", "the tracker concludes \\ the tracked object has left the", "window, close log file and video objects if any cap.release()", "left the frame') parser.add_argument('--di', type=int, help='detection interval i.e number of", "droi_frame = get_roi_frame(frame, droi) initial_bboxes = get_bounding_boxes(droi_frame, detector) for box", "= 'yolo' if args.detector == None else args.detector tracker =", "cv2.resize(frame, (858, 480)) cv2.imshow('tracking', resized_frame) frame_counter += 1 # save", "count logs') parser.add_argument('--clposition', help='position of counting line (options: top, bottom,", "= cap.get(cv2.CAP_PROP_POS_FRAMES) frame_count = cap.get(cv2.CAP_PROP_FRAME_COUNT) if nframes % 10 ==", "rerun detection droi_frame = get_roi_frame(frame, droi) boxes = get_bounding_boxes(droi_frame, detector)", "h) = [int(v) for v in blob.bounding_box] cv2.rectangle(frame, (x, y),", "\\ i.e number of tracking failures before the tracker concludes", "args.hideimage: resized_frame = cv2.resize(frame, (858, 480)) cv2.imshow('tracking', resized_frame) frame_counter +=", "frame_counter >= DETECTION_INTERVAL: # rerun detection droi_frame = get_roi_frame(frame, droi)", "cv2.rectangle(frame, (x, y), (x + w, y + h), (0,", "2, cv2.LINE_AA) # draw counting line cv2.line(frame, counting_line[0], counting_line[1], (0,", "= cap.get(cv2.CAP_PROP_FRAME_COUNT) if nframes % 10 == 0 or nframes", "import os import contextlib from datetime import datetime import argparse", "yolo)') 
parser.add_argument('--tracker', help='select a model/algorithm to use for vehicle tracking", "end video loop if on the last frame break #", "record counting if args.record: output_video = cv2.VideoWriter('./videos/output.avi', cv2.VideoWriter_fourcc('M','J','P','G'), 30, (f_width,", "args.detector tracker = 'kcf' if args.tracker == None else args.tracker", "_ = frame.shape # init video object and log file", "0xFF == ord('q'): print('Video exited.') break # end capture, close", "help='record video and vehicle count logs') parser.add_argument('--clposition', help='position of counting", "= cv2.resize(frame, (858, 480)) cv2.imshow('tracking', resized_frame) frame_counter += 1 #", "= get_bounding_boxes(droi_frame, detector) for box in initial_bboxes: _blob = create_blob(box,", "get_roi_frame, draw_roi from counter import get_counting_line, is_passed_counting_line # parse CLI", "# show detection roi if args.showdroi: frame = draw_roi(frame, droi)", "camshift | default: kcf)') parser.add_argument('--record', action='store_true', help='record video and vehicle", "= 1 frame_counter = 0 DETECTION_INTERVAL = 10 if args.di", "== None else args.clposition counting_line = get_counting_line(clposition, f_width, f_height) vehicle_count", "args.record: output_video.write(frame) # visualize vehicle counting if not args.hideimage: resized_frame", "args.mctf detector = 'yolo' if args.detector == None else args.detector", "args.droi: droi = [] points = args.droi.replace(' ', '').split('|') for", "where you want detections to be made (format: 1,2|3,4|5,6|7,8|9,10 \\", "(0, 255, 0), 2, cv2.LINE_AA) # draw counting line cv2.line(frame,", "list(blobs.items()): # update trackers success, box = blob.tracker.update(frame) if success:", "# end video loop if 'q' key is pressed if", "frame') parser.add_argument('--di', type=int, help='detection interval i.e number of frames \\", "whole video frame])') parser.add_argument('--showdroi', action='store_true', help='display/overlay the detection 
roi on", "or nframes == 1: print(\"Processing {} of {} frames\".format(nframes,frame_count)) for", "CLI arguments parser = argparse.ArgumentParser() parser.add_argument('video', help='relative/absolute path to video", "if 's' key is pressed if k & 0xFF ==", "'kcf' if args.tracker == None else args.tracker f_height, f_width, _", "droi) boxes = get_bounding_boxes(droi_frame, detector) blobs, current_blob_id = add_new_blobs(boxes, blobs,", "is pressed if k & 0xFF == ord('s'): cv2.imwrite(os.path.join('screenshots', 'ss_'", "of interest (ROI) \\ i.e a set of vertices that", "clposition = 'bottom' if args.clposition == None else args.clposition counting_line", "pressed if k & 0xFF == ord('q'): print('Video exited.') break", "'a') log_file.write('vehicle_id, count, datetime\\n') log_file.flush() # set counting line clposition", "- 2), cv2.FONT_HERSHEY_DUPLEX, 1, (0, 255, 0), 2, cv2.LINE_AA) #", "cv2.FONT_HERSHEY_DUPLEX, 1, (0, 255, 0), 2, cv2.LINE_AA) # draw counting", "else: print('End of video.') # end video loop if on", "region of interest (ROI) \\ i.e a set of vertices", "close window, close log file and video objects if any", "args.iscam or cap.get(cv2.CAP_PROP_POS_FRAMES) + 1 < cap.get(cv2.CAP_PROP_FRAME_COUNT): _, frame =", "uuid import os import contextlib from datetime import datetime import", "camera') parser.add_argument('--droi', help='specify a detection region of interest (ROI) \\", "frame])') parser.add_argument('--showdroi', action='store_true', help='display/overlay the detection roi on the video')", "from trackers.tracker import create_blob, add_new_blobs, remove_duplicates import numpy as np", "count, datetime\\n') log_file.flush() # set counting line clposition = 'bottom'", "for box in initial_bboxes: _blob = create_blob(box, frame, tracker) blobs[blob_id]", "capture, close window, close log file and video objects if", "help='specify a detection region of interest (ROI) \\ i.e a", "detection is carried out again (in order to find new", "roi if 
args.showdroi: frame = draw_roi(frame, droi) # save frame", "None else args.mctf detector = 'yolo' if args.detector == None", "f_width, f_height) vehicle_count = 0 # create detection ROI droi", "+ uuid.uuid4().hex + '.png'), frame) print('Screenshot taken.') else: print('End of", "tracker concludes \\ the tracked object has left the frame')", "os.remove(log_file_name) log_file = open(log_file_name, 'a') log_file.write('vehicle_id, count, datetime\\n') log_file.flush() #", "get_bounding_boxes(droi_frame, detector) for box in initial_bboxes: _blob = create_blob(box, frame,", "video loop if 'q' key is pressed if k &", "save frame if 's' key is pressed if k &", "if any cap.release() if not args.hideimage: cv2.destroyAllWindows() if args.record: log_file.close()", "scene video video = int(args.video) if args.iscam else args.video cap", "0xFF == ord('s'): cv2.imwrite(os.path.join('screenshots', 'ss_' + uuid.uuid4().hex + '.png'), frame)", "= [] points = args.droi.replace(' ', '').split('|') for point_str in", "label blob bounding boxes for _id, blob in blobs.items(): (x,", "= add_new_blobs(boxes, blobs, frame, tracker, blob_id, counting_line, clposition) blob_id =", "draw_roi(frame, droi) # save frame in video output if args.record:", "you want detections to be made (format: 1,2|3,4|5,6|7,8|9,10 \\ default:", "= remove_duplicates(blobs) frame_counter = 0 # draw and label blob", "parser.add_argument('--droi', help='specify a detection region of interest (ROI) \\ i.e", "traffic scene') parser.add_argument('--iscam', action='store_true', help='specify if video capture is from", "get_bounding_boxes import uuid import os import contextlib from datetime import", "vehicles if is_passed_counting_line(blob.centroid, counting_line, clposition) and not blob.counted: blob.counted =", "= get_counting_line(clposition, f_width, f_height) vehicle_count = 0 # create detection", "(x + w, y + h), (0, 255, 0), 2)", "and create new blobs droi_frame = get_roi_frame(frame, droi) initial_bboxes 
=", "= cv2.waitKey(1) if args.iscam or cap.get(cv2.CAP_PROP_POS_FRAMES) + 1 < cap.get(cv2.CAP_PROP_FRAME_COUNT):", "action='store_true', help='display/overlay the detection roi on the video') parser.add_argument('--mctf', type=int,", "not args.hideimage: resized_frame = cv2.resize(frame, (858, 480)) cv2.imshow('tracking', resized_frame) frame_counter", "vehicle count logs') parser.add_argument('--clposition', help='position of counting line (options: top,", "== None else args.tracker f_height, f_width, _ = frame.shape #", "line clposition = 'bottom' if args.clposition == None else args.clposition", "exited.') break # end capture, close window, close log file", "cv2.line(frame, counting_line[0], counting_line[1], (0, 255, 0), 3) # display vehicle", "cap.read() nframes = cap.get(cv2.CAP_PROP_POS_FRAMES) frame_count = cap.get(cv2.CAP_PROP_FRAME_COUNT) if nframes %", "{2}\\n'.format('v_' + str(_id), vehicle_count, datetime.now()) log_file.write(_row) log_file.flush() if frame_counter >=", "to use for vehicle tracking \\ (options: csrt, kcf, camshift", "datetime) if args.record: _row = '{0}, {1}, {2}\\n'.format('v_' + str(_id),", "del blobs[_id] # count vehicles if is_passed_counting_line(blob.centroid, counting_line, clposition) and", "is_passed_counting_line # parse CLI arguments parser = argparse.ArgumentParser() parser.add_argument('video', help='relative/absolute", "args.clposition == None else args.clposition counting_line = get_counting_line(clposition, f_width, f_height)", "from counter import get_counting_line, is_passed_counting_line # parse CLI arguments parser", "scene') parser.add_argument('--iscam', action='store_true', help='specify if video capture is from a", "f_height)] if args.droi: droi = [] points = args.droi.replace(' ',", "== None else args.di MAX_CONSECUTIVE_TRACKING_FAILURES = 3 if args.mctf ==", "to be made (format: 1,2|3,4|5,6|7,8|9,10 \\ default: 0,0|frame_width,0|frame_width,frame_height|0,frame_height \\ [i.e", "\\ before detection is 
carried out again (in order to", "(options: yolo, haarc, bgsub, ssd | default: yolo)') parser.add_argument('--tracker', help='select", "the detection roi on the video') parser.add_argument('--mctf', type=int, help='maximum consecutive", "if args.tracker == None else args.tracker f_height, f_width, _ =", "# init video object and log file to record counting", "tracker, blob_id, counting_line, clposition) blob_id = current_blob_id blobs = remove_duplicates(blobs)", "log_file.flush() # set counting line clposition = 'bottom' if args.clposition", "k = cv2.waitKey(1) if args.iscam or cap.get(cv2.CAP_PROP_POS_FRAMES) + 1 <", "0, 0), 2, cv2.LINE_AA) # show detection roi if args.showdroi:", "args.detector == None else args.detector tracker = 'kcf' if args.tracker", "draw_roi from counter import get_counting_line, is_passed_counting_line # parse CLI arguments", "log_file.flush() if frame_counter >= DETECTION_INTERVAL: # rerun detection droi_frame =", "# log count data to a file (vehicle_id, count, datetime)", "np from collections import OrderedDict from detectors.detector import get_bounding_boxes import", "(polygon) \\ where you want detections to be made (format:", "before the tracker concludes \\ the tracked object has left", "video') parser.add_argument('--mctf', type=int, help='maximum consecutive tracking failures \\ i.e number", "args.tracker == None else args.tracker f_height, f_width, _ = frame.shape", "for _id, blob in list(blobs.items()): # update trackers success, box", "resized_frame) frame_counter += 1 # save frame if 's' key", "k & 0xFF == ord('s'): cv2.imwrite(os.path.join('screenshots', 'ss_' + uuid.uuid4().hex +", "help='specify if video capture is from a camera') parser.add_argument('--droi', help='specify", "(0, 255, 0), 2) cv2.putText(frame, 'v_' + str(_id), (x, y", "be made (format: 1,2|3,4|5,6|7,8|9,10 \\ default: 0,0|frame_width,0|frame_width,frame_height|0,frame_height \\ [i.e the", "number of frames \\ before detection is carried out again", "+ 
'.png'), frame) print('Screenshot taken.') else: print('End of video.') #", "= open(log_file_name, 'a') log_file.write('vehicle_id, count, datetime\\n') log_file.flush() # set counting", "counting line cv2.line(frame, counting_line[0], counting_line[1], (0, 255, 0), 3) #", "_blob = create_blob(box, frame, tracker) blobs[blob_id] = _blob blob_id +=", "vehicle count cv2.putText(frame, 'Count: ' + str(vehicle_count), (20, 60), cv2.FONT_HERSHEY_DUPLEX,", "and video objects if any cap.release() if not args.hideimage: cv2.destroyAllWindows()", "if args.record: output_video.write(frame) # visualize vehicle counting if not args.hideimage:", "args.droi.replace(' ', '').split('|') for point_str in points: point = tuple(map(int,", "point_str in points: point = tuple(map(int, point_str.split(','))) droi.append(point) # initialize", "objects if any cap.release() if not args.hideimage: cv2.destroyAllWindows() if args.record:", "create_blob(box, frame, tracker) blobs[blob_id] = _blob blob_id += 1 while", "y + h), (0, 255, 0), 2) cv2.putText(frame, 'v_' +", "2, (255, 0, 0), 2, cv2.LINE_AA) # show detection roi", "\\ and update the trackers of old ones)') parser.add_argument('--detector', help='select", "key is pressed if k & 0xFF == ord('q'): print('Video", "\\ the tracked object has left the frame') parser.add_argument('--di', type=int,", "# update trackers success, box = blob.tracker.update(frame) if success: blob.num_consecutive_tracking_failures", "droi) # save frame in video output if args.record: output_video.write(frame)", "update trackers success, box = blob.tracker.update(frame) if success: blob.num_consecutive_tracking_failures =", "success, box = blob.tracker.update(frame) if success: blob.num_consecutive_tracking_failures = 0 blob.update(box)", "f_height) vehicle_count = 0 # create detection ROI droi =", "help='maximum consecutive tracking failures \\ i.e number of tracking failures", "bottom, \\ left, right | default: bottom)') parser.add_argument('--hideimage', 
action='store_true', help='hide", "image') args = parser.parse_args() # capture traffic scene video video", "# initialize trackers and create new blobs droi_frame = get_roi_frame(frame,", "'v_' + str(_id), (x, y - 2), cv2.FONT_HERSHEY_DUPLEX, 1, (0,", "and vehicle count logs') parser.add_argument('--clposition', help='position of counting line (options:", "== 1: print(\"Processing {} of {} frames\".format(nframes,frame_count)) for _id, blob", "{} of {} frames\".format(nframes,frame_count)) for _id, blob in list(blobs.items()): #", "blob bounding boxes for _id, blob in blobs.items(): (x, y,", "0 # create detection ROI droi = [(0, 0), (f_width,", "# display vehicle count cv2.putText(frame, 'Count: ' + str(vehicle_count), (20,", "# capture traffic scene video video = int(args.video) if args.iscam", "the last frame break # end video loop if 'q'", "print('Video exited.') break # end capture, close window, close log", "2) cv2.putText(frame, 'v_' + str(_id), (x, y - 2), cv2.FONT_HERSHEY_DUPLEX,", "new vehicles \\ and update the trackers of old ones)')", "(858, 480)) cv2.imshow('tracking', resized_frame) frame_counter += 1 # save frame", "OrderedDict() blob_id = 1 frame_counter = 0 DETECTION_INTERVAL = 10", "remove_duplicates(blobs) frame_counter = 0 # draw and label blob bounding", "ROI droi = [(0, 0), (f_width, 0), (f_width, f_height), (0,", "= cv2.VideoCapture(video) _, frame = cap.read() # configs blobs =", "DETECTION_INTERVAL = 10 if args.di == None else args.di MAX_CONSECUTIVE_TRACKING_FAILURES", "a detection region of interest (ROI) \\ i.e a set", "\\ (options: yolo, haarc, bgsub, ssd | default: yolo)') parser.add_argument('--tracker',", "frame break # end video loop if 'q' key is", "f_height, f_width, _ = frame.shape # init video object and", "= 'log.txt' with contextlib.suppress(FileNotFoundError): os.remove(log_file_name) log_file = open(log_file_name, 'a') log_file.write('vehicle_id,", "{} frames\".format(nframes,frame_count)) for _id, blob in 
list(blobs.items()): # update trackers", "if on the last frame break # end video loop", "[(0, 0), (f_width, 0), (f_width, f_height), (0, f_height)] if args.droi:", "capture is from a camera') parser.add_argument('--droi', help='specify a detection region", "else args.di MAX_CONSECUTIVE_TRACKING_FAILURES = 3 if args.mctf == None else", "file (vehicle_id, count, datetime) if args.record: _row = '{0}, {1},", "count data to a file (vehicle_id, count, datetime) if args.record:", "object and log file to record counting if args.record: output_video", "(f_width, 0), (f_width, f_height), (0, f_height)] if args.droi: droi =", "video capture is from a camera') parser.add_argument('--droi', help='specify a detection", "0), 2) cv2.putText(frame, 'v_' + str(_id), (x, y - 2),", "boxes for _id, blob in blobs.items(): (x, y, w, h)", "droi = [] points = args.droi.replace(' ', '').split('|') for point_str", "255, 0), 3) # display vehicle count cv2.putText(frame, 'Count: '", "= True vehicle_count += 1 # log count data to", "help='select a model/algorithm to use for vehicle detection \\ (options:", "cv2.VideoWriter('./videos/output.avi', cv2.VideoWriter_fourcc('M','J','P','G'), 30, (f_width, f_height)) log_file_name = 'log.txt' with contextlib.suppress(FileNotFoundError):", "datetime import datetime import argparse from utils.detection_roi import get_roi_frame, draw_roi", "# parse CLI arguments parser = argparse.ArgumentParser() parser.add_argument('video', help='relative/absolute path", "droi_frame = get_roi_frame(frame, droi) boxes = get_bounding_boxes(droi_frame, detector) blobs, current_blob_id", "from collections import OrderedDict from detectors.detector import get_bounding_boxes import uuid", "frame in video output if args.record: output_video.write(frame) # visualize vehicle", "help='position of counting line (options: top, bottom, \\ left, right", "= frame.shape # init video object and log file to", "cv2.VideoWriter_fourcc('M','J','P','G'), 30, (f_width, f_height)) 
log_file_name = 'log.txt' with contextlib.suppress(FileNotFoundError): os.remove(log_file_name)", "+ str(vehicle_count), (20, 60), cv2.FONT_HERSHEY_DUPLEX, 2, (255, 0, 0), 2,", "of counting line (options: top, bottom, \\ left, right |", "else args.mctf detector = 'yolo' if args.detector == None else", "cv2.imshow('tracking', resized_frame) frame_counter += 1 # save frame if 's'", "blob_id = current_blob_id blobs = remove_duplicates(blobs) frame_counter = 0 #", "10 == 0 or nframes == 1: print(\"Processing {} of", "if args.di == None else args.di MAX_CONSECUTIVE_TRACKING_FAILURES = 3 if", "video output if args.record: output_video.write(frame) # visualize vehicle counting if", "concludes \\ the tracked object has left the frame') parser.add_argument('--di',", "'{0}, {1}, {2}\\n'.format('v_' + str(_id), vehicle_count, datetime.now()) log_file.write(_row) log_file.flush() if", "cv2 from trackers.tracker import create_blob, add_new_blobs, remove_duplicates import numpy as", "break # end capture, close window, close log file and", "0 DETECTION_INTERVAL = 10 if args.di == None else args.di", "cap.get(cv2.CAP_PROP_FRAME_COUNT) if nframes % 10 == 0 or nframes ==", "# set counting line clposition = 'bottom' if args.clposition ==", "frame) print('Screenshot taken.') else: print('End of video.') # end video", "\\ i.e a set of vertices that represent the area", "is carried out again (in order to find new vehicles", "interval i.e number of frames \\ before detection is carried", "csrt, kcf, camshift | default: kcf)') parser.add_argument('--record', action='store_true', help='record video", "if args.record: output_video = cv2.VideoWriter('./videos/output.avi', cv2.VideoWriter_fourcc('M','J','P','G'), 30, (f_width, f_height)) log_file_name", "= 'bottom' if args.clposition == None else args.clposition counting_line =", "', '').split('|') for point_str in points: point = tuple(map(int, point_str.split(',')))", "1: print(\"Processing {} of {} frames\".format(nframes,frame_count)) for 
_id, blob in", "parser.add_argument('--mctf', type=int, help='maximum consecutive tracking failures \\ i.e number of", "to video or camera input of traffic scene') parser.add_argument('--iscam', action='store_true',", "help='relative/absolute path to video or camera input of traffic scene')", "of old ones)') parser.add_argument('--detector', help='select a model/algorithm to use for", "capture traffic scene video video = int(args.video) if args.iscam else", "parser.add_argument('--iscam', action='store_true', help='specify if video capture is from a camera')", "MAX_CONSECUTIVE_TRACKING_FAILURES = 3 if args.mctf == None else args.mctf detector", "None else args.detector tracker = 'kcf' if args.tracker == None", "and update the trackers of old ones)') parser.add_argument('--detector', help='select a", "= 'kcf' if args.tracker == None else args.tracker f_height, f_width,", "counting if not args.hideimage: resized_frame = cv2.resize(frame, (858, 480)) cv2.imshow('tracking',", "as np from collections import OrderedDict from detectors.detector import get_bounding_boxes", "w, h) = [int(v) for v in blob.bounding_box] cv2.rectangle(frame, (x,", "print('Screenshot taken.') else: print('End of video.') # end video loop", "a model/algorithm to use for vehicle tracking \\ (options: csrt,", "None else args.tracker f_height, f_width, _ = frame.shape # init", "type=int, help='maximum consecutive tracking failures \\ i.e number of tracking", "vehicle tracking \\ (options: csrt, kcf, camshift | default: kcf)')", "vehicle_count, datetime.now()) log_file.write(_row) log_file.flush() if frame_counter >= DETECTION_INTERVAL: # rerun", "current_blob_id blobs = remove_duplicates(blobs) frame_counter = 0 # draw and", "in video output if args.record: output_video.write(frame) # visualize vehicle counting", "# visualize vehicle counting if not args.hideimage: resized_frame = cv2.resize(frame,", "blob.bounding_box] cv2.rectangle(frame, (x, y), (x + w, y + h),", "bottom)') 
parser.add_argument('--hideimage', action='store_true', help='hide resulting image') args = parser.parse_args() #", "if args.iscam else args.video cap = cv2.VideoCapture(video) _, frame =", "\\ (options: csrt, kcf, camshift | default: kcf)') parser.add_argument('--record', action='store_true',", "counting_line, clposition) blob_id = current_blob_id blobs = remove_duplicates(blobs) frame_counter =", "a set of vertices that represent the area (polygon) \\", "blobs = remove_duplicates(blobs) frame_counter = 0 # draw and label", "nframes = cap.get(cv2.CAP_PROP_POS_FRAMES) frame_count = cap.get(cv2.CAP_PROP_FRAME_COUNT) if nframes % 10", "detection roi if args.showdroi: frame = draw_roi(frame, droi) # save", "255, 0), 2) cv2.putText(frame, 'v_' + str(_id), (x, y -", "counting_line = get_counting_line(clposition, f_width, f_height) vehicle_count = 0 # create", "get_roi_frame(frame, droi) initial_bboxes = get_bounding_boxes(droi_frame, detector) for box in initial_bboxes:", "vehicle_count += 1 # log count data to a file", "datetime import argparse from utils.detection_roi import get_roi_frame, draw_roi from counter", "cap = cv2.VideoCapture(video) _, frame = cap.read() # configs blobs", "import argparse from utils.detection_roi import get_roi_frame, draw_roi from counter import", "frame_counter = 0 DETECTION_INTERVAL = 10 if args.di == None", "initialize trackers and create new blobs droi_frame = get_roi_frame(frame, droi)", "detection region of interest (ROI) \\ i.e a set of", "{1}, {2}\\n'.format('v_' + str(_id), vehicle_count, datetime.now()) log_file.write(_row) log_file.flush() if frame_counter", "parse CLI arguments parser = argparse.ArgumentParser() parser.add_argument('video', help='relative/absolute path to", "parser.add_argument('--record', action='store_true', help='record video and vehicle count logs') parser.add_argument('--clposition', help='position", "ord('s'): cv2.imwrite(os.path.join('screenshots', 'ss_' + uuid.uuid4().hex + '.png'), frame) 
print('Screenshot taken.')", "log_file.write('vehicle_id, count, datetime\\n') log_file.flush() # set counting line clposition =", "+= 1 # delete untracked blobs if blob.num_consecutive_tracking_failures >= MAX_CONSECUTIVE_TRACKING_FAILURES:", "from detectors.detector import get_bounding_boxes import uuid import os import contextlib", "output_video = cv2.VideoWriter('./videos/output.avi', cv2.VideoWriter_fourcc('M','J','P','G'), 30, (f_width, f_height)) log_file_name = 'log.txt'", "create new blobs droi_frame = get_roi_frame(frame, droi) initial_bboxes = get_bounding_boxes(droi_frame,", "roi on the video') parser.add_argument('--mctf', type=int, help='maximum consecutive tracking failures", "help='select a model/algorithm to use for vehicle tracking \\ (options:", "= 3 if args.mctf == None else args.mctf detector =", "(x, y, w, h) = [int(v) for v in blob.bounding_box]", "counting_line[1], (0, 255, 0), 3) # display vehicle count cv2.putText(frame,", "blobs.items(): (x, y, w, h) = [int(v) for v in", "vehicles \\ and update the trackers of old ones)') parser.add_argument('--detector',", "trackers and create new blobs droi_frame = get_roi_frame(frame, droi) initial_bboxes", "boxes = get_bounding_boxes(droi_frame, detector) blobs, current_blob_id = add_new_blobs(boxes, blobs, frame,", "a file (vehicle_id, count, datetime) if args.record: _row = '{0},", "ones)') parser.add_argument('--detector', help='select a model/algorithm to use for vehicle detection", "args.clposition counting_line = get_counting_line(clposition, f_width, f_height) vehicle_count = 0 #", "i.e number of tracking failures before the tracker concludes \\", "detector) for box in initial_bboxes: _blob = create_blob(box, frame, tracker)", ">= MAX_CONSECUTIVE_TRACKING_FAILURES: del blobs[_id] # count vehicles if is_passed_counting_line(blob.centroid, counting_line,", "arguments parser = argparse.ArgumentParser() parser.add_argument('video', help='relative/absolute path to video or", "import uuid import os 
import contextlib from datetime import datetime", "if args.clposition == None else args.clposition counting_line = get_counting_line(clposition, f_width,", "args = parser.parse_args() # capture traffic scene video video =", "None else args.di MAX_CONSECUTIVE_TRACKING_FAILURES = 3 if args.mctf == None", "+= 1 # save frame if 's' key is pressed", "= 10 if args.di == None else args.di MAX_CONSECUTIVE_TRACKING_FAILURES =", "the area (polygon) \\ where you want detections to be", "count vehicles if is_passed_counting_line(blob.centroid, counting_line, clposition) and not blob.counted: blob.counted", "in list(blobs.items()): # update trackers success, box = blob.tracker.update(frame) if", "import get_bounding_boxes import uuid import os import contextlib from datetime", "| default: bottom)') parser.add_argument('--hideimage', action='store_true', help='hide resulting image') args =", "(options: top, bottom, \\ left, right | default: bottom)') parser.add_argument('--hideimage',", "if args.iscam or cap.get(cv2.CAP_PROP_POS_FRAMES) + 1 < cap.get(cv2.CAP_PROP_FRAME_COUNT): _, frame", "point_str.split(','))) droi.append(point) # initialize trackers and create new blobs droi_frame", "frame_count = cap.get(cv2.CAP_PROP_FRAME_COUNT) if nframes % 10 == 0 or", "import OrderedDict from detectors.detector import get_bounding_boxes import uuid import os", "blobs droi_frame = get_roi_frame(frame, droi) initial_bboxes = get_bounding_boxes(droi_frame, detector) for", "cv2.LINE_AA) # show detection roi if args.showdroi: frame = draw_roi(frame,", "bgsub, ssd | default: yolo)') parser.add_argument('--tracker', help='select a model/algorithm to", "on the last frame break # end video loop if", "blob.num_consecutive_tracking_failures >= MAX_CONSECUTIVE_TRACKING_FAILURES: del blobs[_id] # count vehicles if is_passed_counting_line(blob.centroid,", "last frame break # end video loop if 'q' key", "k & 0xFF == ord('q'): print('Video exited.') break # end", "(0, 255, 0), 3) # display vehicle count 
cv2.putText(frame, 'Count:", "== ord('s'): cv2.imwrite(os.path.join('screenshots', 'ss_' + uuid.uuid4().hex + '.png'), frame) print('Screenshot", "import contextlib from datetime import datetime import argparse from utils.detection_roi", "= argparse.ArgumentParser() parser.add_argument('video', help='relative/absolute path to video or camera input", "break # end video loop if 'q' key is pressed", "+= 1 # log count data to a file (vehicle_id,", "output if args.record: output_video.write(frame) # visualize vehicle counting if not", "if nframes % 10 == 0 or nframes == 1:", "for _id, blob in blobs.items(): (x, y, w, h) =", "if frame_counter >= DETECTION_INTERVAL: # rerun detection droi_frame = get_roi_frame(frame,", "and label blob bounding boxes for _id, blob in blobs.items():", "cap.get(cv2.CAP_PROP_POS_FRAMES) + 1 < cap.get(cv2.CAP_PROP_FRAME_COUNT): _, frame = cap.read() nframes", "(0, f_height)] if args.droi: droi = [] points = args.droi.replace('", "display vehicle count cv2.putText(frame, 'Count: ' + str(vehicle_count), (20, 60),", "(in order to find new vehicles \\ and update the", "= tuple(map(int, point_str.split(','))) droi.append(point) # initialize trackers and create new", "os import contextlib from datetime import datetime import argparse from", "f_height)) log_file_name = 'log.txt' with contextlib.suppress(FileNotFoundError): os.remove(log_file_name) log_file = open(log_file_name,", "1 # log count data to a file (vehicle_id, count,", "data to a file (vehicle_id, count, datetime) if args.record: _row", "log_file.write(_row) log_file.flush() if frame_counter >= DETECTION_INTERVAL: # rerun detection droi_frame", "+ 1 < cap.get(cv2.CAP_PROP_FRAME_COUNT): _, frame = cap.read() nframes =", "'').split('|') for point_str in points: point = tuple(map(int, point_str.split(','))) droi.append(point)", "log file and video objects if any cap.release() if not", "box in initial_bboxes: _blob = create_blob(box, frame, tracker) blobs[blob_id] =", "if args.detector == 
None else args.detector tracker = 'kcf' if", "args.video cap = cv2.VideoCapture(video) _, frame = cap.read() # configs", "of tracking failures before the tracker concludes \\ the tracked", "ssd | default: yolo)') parser.add_argument('--tracker', help='select a model/algorithm to use", "frame = cap.read() nframes = cap.get(cv2.CAP_PROP_POS_FRAMES) frame_count = cap.get(cv2.CAP_PROP_FRAME_COUNT) if", "+ h), (0, 255, 0), 2) cv2.putText(frame, 'v_' + str(_id),", "0), (f_width, f_height), (0, f_height)] if args.droi: droi = []", "# save frame in video output if args.record: output_video.write(frame) #", "blob_id, counting_line, clposition) blob_id = current_blob_id blobs = remove_duplicates(blobs) frame_counter", "numpy as np from collections import OrderedDict from detectors.detector import", "str(vehicle_count), (20, 60), cv2.FONT_HERSHEY_DUPLEX, 2, (255, 0, 0), 2, cv2.LINE_AA)", "= args.droi.replace(' ', '').split('|') for point_str in points: point =", "droi) initial_bboxes = get_bounding_boxes(droi_frame, detector) for box in initial_bboxes: _blob", "for point_str in points: point = tuple(map(int, point_str.split(','))) droi.append(point) #", "set counting line clposition = 'bottom' if args.clposition == None", "\\ left, right | default: bottom)') parser.add_argument('--hideimage', action='store_true', help='hide resulting", "MAX_CONSECUTIVE_TRACKING_FAILURES: del blobs[_id] # count vehicles if is_passed_counting_line(blob.centroid, counting_line, clposition)", "blobs, frame, tracker, blob_id, counting_line, clposition) blob_id = current_blob_id blobs", "end video loop if 'q' key is pressed if k", "update the trackers of old ones)') parser.add_argument('--detector', help='select a model/algorithm", "frame, tracker) blobs[blob_id] = _blob blob_id += 1 while True:", "if 'q' key is pressed if k & 0xFF ==", "loop if 'q' key is pressed if k & 0xFF", "the trackers of old ones)') parser.add_argument('--detector', help='select a model/algorithm to", "blobs[_id] # count 
vehicles if is_passed_counting_line(blob.centroid, counting_line, clposition) and not", "# draw counting line cv2.line(frame, counting_line[0], counting_line[1], (0, 255, 0),", "[int(v) for v in blob.bounding_box] cv2.rectangle(frame, (x, y), (x +", "save frame in video output if args.record: output_video.write(frame) # visualize", "help='detection interval i.e number of frames \\ before detection is", "parser.add_argument('--clposition', help='position of counting line (options: top, bottom, \\ left,", "from utils.detection_roi import get_roi_frame, draw_roi from counter import get_counting_line, is_passed_counting_line", "if blob.num_consecutive_tracking_failures >= MAX_CONSECUTIVE_TRACKING_FAILURES: del blobs[_id] # count vehicles if", "log_file = open(log_file_name, 'a') log_file.write('vehicle_id, count, datetime\\n') log_file.flush() # set", "blob in blobs.items(): (x, y, w, h) = [int(v) for", "30, (f_width, f_height)) log_file_name = 'log.txt' with contextlib.suppress(FileNotFoundError): os.remove(log_file_name) log_file", "from datetime import datetime import argparse from utils.detection_roi import get_roi_frame,", "counting if args.record: output_video = cv2.VideoWriter('./videos/output.avi', cv2.VideoWriter_fourcc('M','J','P','G'), 30, (f_width, f_height))", "(f_width, f_height), (0, f_height)] if args.droi: droi = [] points", "+ w, y + h), (0, 255, 0), 2) cv2.putText(frame,", "480)) cv2.imshow('tracking', resized_frame) frame_counter += 1 # save frame if", "= 0 # create detection ROI droi = [(0, 0),", "show detection roi if args.showdroi: frame = draw_roi(frame, droi) #", "the whole video frame])') parser.add_argument('--showdroi', action='store_true', help='display/overlay the detection roi", "while True: k = cv2.waitKey(1) if args.iscam or cap.get(cv2.CAP_PROP_POS_FRAMES) +", "_blob blob_id += 1 while True: k = cv2.waitKey(1) if", "parser.add_argument('--detector', help='select a model/algorithm to use for vehicle detection \\", "camera input of 
traffic scene') parser.add_argument('--iscam', action='store_true', help='specify if video", "clposition) and not blob.counted: blob.counted = True vehicle_count += 1", "box = blob.tracker.update(frame) if success: blob.num_consecutive_tracking_failures = 0 blob.update(box) else:", "< cap.get(cv2.CAP_PROP_FRAME_COUNT): _, frame = cap.read() nframes = cap.get(cv2.CAP_PROP_POS_FRAMES) frame_count", "(255, 0, 0), 2, cv2.LINE_AA) # show detection roi if", "10 if args.di == None else args.di MAX_CONSECUTIVE_TRACKING_FAILURES = 3", "kcf, camshift | default: kcf)') parser.add_argument('--record', action='store_true', help='record video and", "current_blob_id = add_new_blobs(boxes, blobs, frame, tracker, blob_id, counting_line, clposition) blob_id", "blobs, current_blob_id = add_new_blobs(boxes, blobs, frame, tracker, blob_id, counting_line, clposition)", "3 if args.mctf == None else args.mctf detector = 'yolo'", "video = int(args.video) if args.iscam else args.video cap = cv2.VideoCapture(video)", "droi.append(point) # initialize trackers and create new blobs droi_frame =", "_row = '{0}, {1}, {2}\\n'.format('v_' + str(_id), vehicle_count, datetime.now()) log_file.write(_row)", "in blobs.items(): (x, y, w, h) = [int(v) for v", "consecutive tracking failures \\ i.e number of tracking failures before", "video objects if any cap.release() if not args.hideimage: cv2.destroyAllWindows() if", "# delete untracked blobs if blob.num_consecutive_tracking_failures >= MAX_CONSECUTIVE_TRACKING_FAILURES: del blobs[_id]", "tracker) blobs[blob_id] = _blob blob_id += 1 while True: k", "cv2.FONT_HERSHEY_DUPLEX, 2, (255, 0, 0), 2, cv2.LINE_AA) # show detection", "import datetime import argparse from utils.detection_roi import get_roi_frame, draw_roi from", "3) # display vehicle count cv2.putText(frame, 'Count: ' + str(vehicle_count),", "'Count: ' + str(vehicle_count), (20, 60), cv2.FONT_HERSHEY_DUPLEX, 2, (255, 0,", "else args.clposition counting_line = get_counting_line(clposition, f_width, 
f_height) vehicle_count = 0", "loop if on the last frame break # end video", "args.tracker f_height, f_width, _ = frame.shape # init video object", "top, bottom, \\ left, right | default: bottom)') parser.add_argument('--hideimage', action='store_true',", "right | default: bottom)') parser.add_argument('--hideimage', action='store_true', help='hide resulting image') args", "haarc, bgsub, ssd | default: yolo)') parser.add_argument('--tracker', help='select a model/algorithm", "= 0 # draw and label blob bounding boxes for", "frame, tracker, blob_id, counting_line, clposition) blob_id = current_blob_id blobs =", "frames \\ before detection is carried out again (in order", "nframes % 10 == 0 or nframes == 1: print(\"Processing", "any cap.release() if not args.hideimage: cv2.destroyAllWindows() if args.record: log_file.close() output_video.release()", "blobs if blob.num_consecutive_tracking_failures >= MAX_CONSECUTIVE_TRACKING_FAILURES: del blobs[_id] # count vehicles", "(f_width, f_height)) log_file_name = 'log.txt' with contextlib.suppress(FileNotFoundError): os.remove(log_file_name) log_file =", "old ones)') parser.add_argument('--detector', help='select a model/algorithm to use for vehicle", "OrderedDict from detectors.detector import get_bounding_boxes import uuid import os import", "1 < cap.get(cv2.CAP_PROP_FRAME_COUNT): _, frame = cap.read() nframes = cap.get(cv2.CAP_PROP_POS_FRAMES)", "frame.shape # init video object and log file to record", "from a camera') parser.add_argument('--droi', help='specify a detection region of interest", "_, frame = cap.read() nframes = cap.get(cv2.CAP_PROP_POS_FRAMES) frame_count = cap.get(cv2.CAP_PROP_FRAME_COUNT)", "carried out again (in order to find new vehicles \\", "bounding boxes for _id, blob in blobs.items(): (x, y, w,", "frame_counter += 1 # save frame if 's' key is", "in points: point = tuple(map(int, point_str.split(','))) droi.append(point) # initialize trackers", "blob_id += 1 while True: k = cv2.waitKey(1) if 
args.iscam", "clposition) blob_id = current_blob_id blobs = remove_duplicates(blobs) frame_counter = 0", "get_counting_line(clposition, f_width, f_height) vehicle_count = 0 # create detection ROI", "cap.get(cv2.CAP_PROP_FRAME_COUNT): _, frame = cap.read() nframes = cap.get(cv2.CAP_PROP_POS_FRAMES) frame_count =", "detections to be made (format: 1,2|3,4|5,6|7,8|9,10 \\ default: 0,0|frame_width,0|frame_width,frame_height|0,frame_height \\", "i.e number of frames \\ before detection is carried out", "0 or nframes == 1: print(\"Processing {} of {} frames\".format(nframes,frame_count))", "log file to record counting if args.record: output_video = cv2.VideoWriter('./videos/output.avi',", "(format: 1,2|3,4|5,6|7,8|9,10 \\ default: 0,0|frame_width,0|frame_width,frame_height|0,frame_height \\ [i.e the whole video", "y - 2), cv2.FONT_HERSHEY_DUPLEX, 1, (0, 255, 0), 2, cv2.LINE_AA)", "video video = int(args.video) if args.iscam else args.video cap =", "0 # draw and label blob bounding boxes for _id,", "(20, 60), cv2.FONT_HERSHEY_DUPLEX, 2, (255, 0, 0), 2, cv2.LINE_AA) #", "untracked blobs if blob.num_consecutive_tracking_failures >= MAX_CONSECUTIVE_TRACKING_FAILURES: del blobs[_id] # count", "= create_blob(box, frame, tracker) blobs[blob_id] = _blob blob_id += 1", "= _blob blob_id += 1 while True: k = cv2.waitKey(1)", "cv2.VideoCapture(video) _, frame = cap.read() # configs blobs = OrderedDict()", "on the video') parser.add_argument('--mctf', type=int, help='maximum consecutive tracking failures \\", "create detection ROI droi = [(0, 0), (f_width, 0), (f_width,", "tuple(map(int, point_str.split(','))) droi.append(point) # initialize trackers and create new blobs", "# save frame if 's' key is pressed if k", "% 10 == 0 or nframes == 1: print(\"Processing {}", "the frame') parser.add_argument('--di', type=int, help='detection interval i.e number of frames", "in initial_bboxes: _blob = create_blob(box, frame, tracker) blobs[blob_id] = _blob", "parser.parse_args() # capture 
traffic scene video video = int(args.video) if", "if args.mctf == None else args.mctf detector = 'yolo' if", "args.iscam else args.video cap = cv2.VideoCapture(video) _, frame = cap.read()", "= parser.parse_args() # capture traffic scene video video = int(args.video)", "initial_bboxes: _blob = create_blob(box, frame, tracker) blobs[blob_id] = _blob blob_id", "args.record: output_video = cv2.VideoWriter('./videos/output.avi', cv2.VideoWriter_fourcc('M','J','P','G'), 30, (f_width, f_height)) log_file_name =", "frame = cap.read() # configs blobs = OrderedDict() blob_id =", "line (options: top, bottom, \\ left, right | default: bottom)')", "the tracked object has left the frame') parser.add_argument('--di', type=int, help='detection", "get_bounding_boxes(droi_frame, detector) blobs, current_blob_id = add_new_blobs(boxes, blobs, frame, tracker, blob_id,", "blob in list(blobs.items()): # update trackers success, box = blob.tracker.update(frame)", "| default: kcf)') parser.add_argument('--record', action='store_true', help='record video and vehicle count", "h), (0, 255, 0), 2) cv2.putText(frame, 'v_' + str(_id), (x,", "tracker = 'kcf' if args.tracker == None else args.tracker f_height,", "'ss_' + uuid.uuid4().hex + '.png'), frame) print('Screenshot taken.') else: print('End", "is from a camera') parser.add_argument('--droi', help='specify a detection region of", "nframes == 1: print(\"Processing {} of {} frames\".format(nframes,frame_count)) for _id,", "if not args.hideimage: resized_frame = cv2.resize(frame, (858, 480)) cv2.imshow('tracking', resized_frame)", "if success: blob.num_consecutive_tracking_failures = 0 blob.update(box) else: blob.num_consecutive_tracking_failures += 1", "= [int(v) for v in blob.bounding_box] cv2.rectangle(frame, (x, y), (x", "new blobs droi_frame = get_roi_frame(frame, droi) initial_bboxes = get_bounding_boxes(droi_frame, detector)", "a camera') parser.add_argument('--droi', help='specify a detection region of interest (ROI)", "taken.') else: 
print('End of video.') # end video loop if", "frame = draw_roi(frame, droi) # save frame in video output", "parser = argparse.ArgumentParser() parser.add_argument('video', help='relative/absolute path to video or camera", "yolo, haarc, bgsub, ssd | default: yolo)') parser.add_argument('--tracker', help='select a", "use for vehicle detection \\ (options: yolo, haarc, bgsub, ssd", "default: kcf)') parser.add_argument('--record', action='store_true', help='record video and vehicle count logs')", "_, frame = cap.read() # configs blobs = OrderedDict() blob_id", "args.di == None else args.di MAX_CONSECUTIVE_TRACKING_FAILURES = 3 if args.mctf", "'bottom' if args.clposition == None else args.clposition counting_line = get_counting_line(clposition,", "= draw_roi(frame, droi) # save frame in video output if", "0), 3) # display vehicle count cv2.putText(frame, 'Count: ' +", "cv2.imwrite(os.path.join('screenshots', 'ss_' + uuid.uuid4().hex + '.png'), frame) print('Screenshot taken.') else:", "parser.add_argument('--hideimage', action='store_true', help='hide resulting image') args = parser.parse_args() # capture", "1 while True: k = cv2.waitKey(1) if args.iscam or cap.get(cv2.CAP_PROP_POS_FRAMES)", "default: bottom)') parser.add_argument('--hideimage', action='store_true', help='hide resulting image') args = parser.parse_args()", "draw counting line cv2.line(frame, counting_line[0], counting_line[1], (0, 255, 0), 3)", "video object and log file to record counting if args.record:", "for vehicle detection \\ (options: yolo, haarc, bgsub, ssd |", "== ord('q'): print('Video exited.') break # end capture, close window,", "line cv2.line(frame, counting_line[0], counting_line[1], (0, 255, 0), 3) # display", "detector) blobs, current_blob_id = add_new_blobs(boxes, blobs, frame, tracker, blob_id, counting_line,", "add_new_blobs(boxes, blobs, frame, tracker, blob_id, counting_line, clposition) blob_id = current_blob_id", "or cap.get(cv2.CAP_PROP_POS_FRAMES) + 1 < 
cap.get(cv2.CAP_PROP_FRAME_COUNT): _, frame = cap.read()", "= 0 blob.update(box) else: blob.num_consecutive_tracking_failures += 1 # delete untracked", "interest (ROI) \\ i.e a set of vertices that represent", "None else args.clposition counting_line = get_counting_line(clposition, f_width, f_height) vehicle_count =", "action='store_true', help='record video and vehicle count logs') parser.add_argument('--clposition', help='position of", "resulting image') args = parser.parse_args() # capture traffic scene video", "if is_passed_counting_line(blob.centroid, counting_line, clposition) and not blob.counted: blob.counted = True", "2), cv2.FONT_HERSHEY_DUPLEX, 1, (0, 255, 0), 2, cv2.LINE_AA) # draw", "argparse.ArgumentParser() parser.add_argument('video', help='relative/absolute path to video or camera input of", "with contextlib.suppress(FileNotFoundError): os.remove(log_file_name) log_file = open(log_file_name, 'a') log_file.write('vehicle_id, count, datetime\\n')", "= OrderedDict() blob_id = 1 frame_counter = 0 DETECTION_INTERVAL =", "initial_bboxes = get_bounding_boxes(droi_frame, detector) for box in initial_bboxes: _blob =", "== None else args.detector tracker = 'kcf' if args.tracker ==", "args.di MAX_CONSECUTIVE_TRACKING_FAILURES = 3 if args.mctf == None else args.mctf", "== None else args.mctf detector = 'yolo' if args.detector ==", "logs') parser.add_argument('--clposition', help='position of counting line (options: top, bottom, \\", "'s' key is pressed if k & 0xFF == ord('s'):", "kcf)') parser.add_argument('--record', action='store_true', help='record video and vehicle count logs') parser.add_argument('--clposition',", "else args.video cap = cv2.VideoCapture(video) _, frame = cap.read() #", "log_file_name = 'log.txt' with contextlib.suppress(FileNotFoundError): os.remove(log_file_name) log_file = open(log_file_name, 'a')", "blob_id = 1 frame_counter = 0 DETECTION_INTERVAL = 10 if", "1 # delete untracked blobs if blob.num_consecutive_tracking_failures >= 
MAX_CONSECUTIVE_TRACKING_FAILURES: del", "import cv2 from trackers.tracker import create_blob, add_new_blobs, remove_duplicates import numpy", "get_counting_line, is_passed_counting_line # parse CLI arguments parser = argparse.ArgumentParser() parser.add_argument('video',", "else args.tracker f_height, f_width, _ = frame.shape # init video", "tracking failures before the tracker concludes \\ the tracked object", "model/algorithm to use for vehicle tracking \\ (options: csrt, kcf,", "frame_counter = 0 # draw and label blob bounding boxes", "resized_frame = cv2.resize(frame, (858, 480)) cv2.imshow('tracking', resized_frame) frame_counter += 1", "i.e a set of vertices that represent the area (polygon)", "(vehicle_id, count, datetime) if args.record: _row = '{0}, {1}, {2}\\n'.format('v_'", "is pressed if k & 0xFF == ord('q'): print('Video exited.')", "cap.read() # configs blobs = OrderedDict() blob_id = 1 frame_counter", "blob.num_consecutive_tracking_failures = 0 blob.update(box) else: blob.num_consecutive_tracking_failures += 1 # delete", "tracking failures \\ i.e number of tracking failures before the", "is_passed_counting_line(blob.centroid, counting_line, clposition) and not blob.counted: blob.counted = True vehicle_count", "utils.detection_roi import get_roi_frame, draw_roi from counter import get_counting_line, is_passed_counting_line #", "to use for vehicle detection \\ (options: yolo, haarc, bgsub,", "failures \\ i.e number of tracking failures before the tracker", "= cap.read() # configs blobs = OrderedDict() blob_id = 1", "cap.get(cv2.CAP_PROP_POS_FRAMES) frame_count = cap.get(cv2.CAP_PROP_FRAME_COUNT) if nframes % 10 == 0", "cv2.putText(frame, 'v_' + str(_id), (x, y - 2), cv2.FONT_HERSHEY_DUPLEX, 1,", "video or camera input of traffic scene') parser.add_argument('--iscam', action='store_true', help='specify", "# rerun detection droi_frame = get_roi_frame(frame, droi) boxes = get_bounding_boxes(droi_frame,", "vehicle counting if not args.hideimage: 
resized_frame = cv2.resize(frame, (858, 480))", "tracking \\ (options: csrt, kcf, camshift | default: kcf)') parser.add_argument('--record',", "traffic scene video video = int(args.video) if args.iscam else args.video", "default: 0,0|frame_width,0|frame_width,frame_height|0,frame_height \\ [i.e the whole video frame])') parser.add_argument('--showdroi', action='store_true',", "not blob.counted: blob.counted = True vehicle_count += 1 # log", "made (format: 1,2|3,4|5,6|7,8|9,10 \\ default: 0,0|frame_width,0|frame_width,frame_height|0,frame_height \\ [i.e the whole", "input of traffic scene') parser.add_argument('--iscam', action='store_true', help='specify if video capture", "represent the area (polygon) \\ where you want detections to", "= int(args.video) if args.iscam else args.video cap = cv2.VideoCapture(video) _,", "= get_bounding_boxes(droi_frame, detector) blobs, current_blob_id = add_new_blobs(boxes, blobs, frame, tracker,", "import numpy as np from collections import OrderedDict from detectors.detector", "_id, blob in list(blobs.items()): # update trackers success, box =", "\\ where you want detections to be made (format: 1,2|3,4|5,6|7,8|9,10", "f_height), (0, f_height)] if args.droi: droi = [] points =", "0,0|frame_width,0|frame_width,frame_height|0,frame_height \\ [i.e the whole video frame])') parser.add_argument('--showdroi', action='store_true', help='display/overlay", "vehicle_count = 0 # create detection ROI droi = [(0,", "& 0xFF == ord('s'): cv2.imwrite(os.path.join('screenshots', 'ss_' + uuid.uuid4().hex + '.png'),", "point = tuple(map(int, point_str.split(','))) droi.append(point) # initialize trackers and create", "# configs blobs = OrderedDict() blob_id = 1 frame_counter =", "(x, y), (x + w, y + h), (0, 255,", "detection \\ (options: yolo, haarc, bgsub, ssd | default: yolo)')", "left, right | default: bottom)') parser.add_argument('--hideimage', action='store_true', help='hide resulting image')", "find new vehicles \\ and update the trackers of 
old", "order to find new vehicles \\ and update the trackers", "output_video.write(frame) # visualize vehicle counting if not args.hideimage: resized_frame =", "if k & 0xFF == ord('q'): print('Video exited.') break #", "0), (f_width, 0), (f_width, f_height), (0, f_height)] if args.droi: droi", "of video.') # end video loop if on the last", "open(log_file_name, 'a') log_file.write('vehicle_id, count, datetime\\n') log_file.flush() # set counting line", "1 frame_counter = 0 DETECTION_INTERVAL = 10 if args.di ==", "parser.add_argument('--di', type=int, help='detection interval i.e number of frames \\ before", "if k & 0xFF == ord('s'): cv2.imwrite(os.path.join('screenshots', 'ss_' + uuid.uuid4().hex", "detection ROI droi = [(0, 0), (f_width, 0), (f_width, f_height),", "init video object and log file to record counting if", "count, datetime) if args.record: _row = '{0}, {1}, {2}\\n'.format('v_' +", "if args.record: _row = '{0}, {1}, {2}\\n'.format('v_' + str(_id), vehicle_count,", "action='store_true', help='hide resulting image') args = parser.parse_args() # capture traffic", "# draw and label blob bounding boxes for _id, blob", "(options: csrt, kcf, camshift | default: kcf)') parser.add_argument('--record', action='store_true', help='record", "remove_duplicates import numpy as np from collections import OrderedDict from", "trackers of old ones)') parser.add_argument('--detector', help='select a model/algorithm to use", "get_roi_frame(frame, droi) boxes = get_bounding_boxes(droi_frame, detector) blobs, current_blob_id = add_new_blobs(boxes,", "video and vehicle count logs') parser.add_argument('--clposition', help='position of counting line", "vehicle detection \\ (options: yolo, haarc, bgsub, ssd | default:", "path to video or camera input of traffic scene') parser.add_argument('--iscam',", "and not blob.counted: blob.counted = True vehicle_count += 1 #", "255, 0), 2, cv2.LINE_AA) # draw counting line cv2.line(frame, counting_line[0],", "counting_line[0], 
counting_line[1], (0, 255, 0), 3) # display vehicle count", "60), cv2.FONT_HERSHEY_DUPLEX, 2, (255, 0, 0), 2, cv2.LINE_AA) # show", "if args.droi: droi = [] points = args.droi.replace(' ', '').split('|')", "+= 1 while True: k = cv2.waitKey(1) if args.iscam or", "+ str(_id), vehicle_count, datetime.now()) log_file.write(_row) log_file.flush() if frame_counter >= DETECTION_INTERVAL:", "points: point = tuple(map(int, point_str.split(','))) droi.append(point) # initialize trackers and", "v in blob.bounding_box] cv2.rectangle(frame, (x, y), (x + w, y", "blobs[blob_id] = _blob blob_id += 1 while True: k =", "'q' key is pressed if k & 0xFF == ord('q'):", "type=int, help='detection interval i.e number of frames \\ before detection", "DETECTION_INTERVAL: # rerun detection droi_frame = get_roi_frame(frame, droi) boxes =", "pressed if k & 0xFF == ord('s'): cv2.imwrite(os.path.join('screenshots', 'ss_' +", "y, w, h) = [int(v) for v in blob.bounding_box] cv2.rectangle(frame,", "1, (0, 255, 0), 2, cv2.LINE_AA) # draw counting line", "of traffic scene') parser.add_argument('--iscam', action='store_true', help='specify if video capture is", "tracked object has left the frame') parser.add_argument('--di', type=int, help='detection interval", "f_width, _ = frame.shape # init video object and log", "frames\".format(nframes,frame_count)) for _id, blob in list(blobs.items()): # update trackers success,", "= [(0, 0), (f_width, 0), (f_width, f_height), (0, f_height)] if", "= blob.tracker.update(frame) if success: blob.num_consecutive_tracking_failures = 0 blob.update(box) else: blob.num_consecutive_tracking_failures", "configs blobs = OrderedDict() blob_id = 1 frame_counter = 0", "a model/algorithm to use for vehicle detection \\ (options: yolo,", "= '{0}, {1}, {2}\\n'.format('v_' + str(_id), vehicle_count, datetime.now()) log_file.write(_row) log_file.flush()", "file and video objects if any cap.release() if not args.hideimage:", "visualize vehicle counting if not args.hideimage: 
resized_frame = cv2.resize(frame, (858,", "blob.tracker.update(frame) if success: blob.num_consecutive_tracking_failures = 0 blob.update(box) else: blob.num_consecutive_tracking_failures +=", "# count vehicles if is_passed_counting_line(blob.centroid, counting_line, clposition) and not blob.counted:", "out again (in order to find new vehicles \\ and", "(ROI) \\ i.e a set of vertices that represent the", "number of tracking failures before the tracker concludes \\ the", "to record counting if args.record: output_video = cv2.VideoWriter('./videos/output.avi', cv2.VideoWriter_fourcc('M','J','P','G'), 30,", "parser.add_argument('video', help='relative/absolute path to video or camera input of traffic", "cv2.putText(frame, 'Count: ' + str(vehicle_count), (20, 60), cv2.FONT_HERSHEY_DUPLEX, 2, (255,", "args.showdroi: frame = draw_roi(frame, droi) # save frame in video", "' + str(vehicle_count), (20, 60), cv2.FONT_HERSHEY_DUPLEX, 2, (255, 0, 0),", "model/algorithm to use for vehicle detection \\ (options: yolo, haarc,", "trackers.tracker import create_blob, add_new_blobs, remove_duplicates import numpy as np from", "| default: yolo)') parser.add_argument('--tracker', help='select a model/algorithm to use for", "before detection is carried out again (in order to find", "trackers success, box = blob.tracker.update(frame) if success: blob.num_consecutive_tracking_failures = 0", "w, y + h), (0, 255, 0), 2) cv2.putText(frame, 'v_'", "'.png'), frame) print('Screenshot taken.') else: print('End of video.') # end", "= 0 DETECTION_INTERVAL = 10 if args.di == None else", "of vertices that represent the area (polygon) \\ where you", "want detections to be made (format: 1,2|3,4|5,6|7,8|9,10 \\ default: 0,0|frame_width,0|frame_width,frame_height|0,frame_height", "object has left the frame') parser.add_argument('--di', type=int, help='detection interval i.e", "file to record counting if args.record: output_video = cv2.VideoWriter('./videos/output.avi', 
cv2.VideoWriter_fourcc('M','J','P','G'),", "ord('q'): print('Video exited.') break # end capture, close window, close", "[] points = args.droi.replace(' ', '').split('|') for point_str in points:", "detectors.detector import get_bounding_boxes import uuid import os import contextlib from", "[i.e the whole video frame])') parser.add_argument('--showdroi', action='store_true', help='display/overlay the detection", "droi = [(0, 0), (f_width, 0), (f_width, f_height), (0, f_height)]", "set of vertices that represent the area (polygon) \\ where", "# end video loop if on the last frame break", "blob.counted: blob.counted = True vehicle_count += 1 # log count", "True: k = cv2.waitKey(1) if args.iscam or cap.get(cv2.CAP_PROP_POS_FRAMES) + 1", "parser.add_argument('--showdroi', action='store_true', help='display/overlay the detection roi on the video') parser.add_argument('--mctf',", "argparse from utils.detection_roi import get_roi_frame, draw_roi from counter import get_counting_line,", "y), (x + w, y + h), (0, 255, 0),", "for vehicle tracking \\ (options: csrt, kcf, camshift | default:", "0), 2, cv2.LINE_AA) # show detection roi if args.showdroi: frame", "blobs = OrderedDict() blob_id = 1 frame_counter = 0 DETECTION_INTERVAL", "points = args.droi.replace(' ', '').split('|') for point_str in points: point", "default: yolo)') parser.add_argument('--tracker', help='select a model/algorithm to use for vehicle", "0 blob.update(box) else: blob.num_consecutive_tracking_failures += 1 # delete untracked blobs", "counting line clposition = 'bottom' if args.clposition == None else", "import create_blob, add_new_blobs, remove_duplicates import numpy as np from collections", "of {} frames\".format(nframes,frame_count)) for _id, blob in list(blobs.items()): # update", "True vehicle_count += 1 # log count data to a", "= current_blob_id blobs = remove_duplicates(blobs) frame_counter = 0 # draw", "2, cv2.LINE_AA) # show detection roi if args.showdroi: frame =", "failures before the 
tracker concludes \\ the tracked object has", "+ str(_id), (x, y - 2), cv2.FONT_HERSHEY_DUPLEX, 1, (0, 255,", "help='hide resulting image') args = parser.parse_args() # capture traffic scene", "count cv2.putText(frame, 'Count: ' + str(vehicle_count), (20, 60), cv2.FONT_HERSHEY_DUPLEX, 2,", "counting line (options: top, bottom, \\ left, right | default:", "help='display/overlay the detection roi on the video') parser.add_argument('--mctf', type=int, help='maximum", "print(\"Processing {} of {} frames\".format(nframes,frame_count)) for _id, blob in list(blobs.items()):", "if video capture is from a camera') parser.add_argument('--droi', help='specify a", "has left the frame') parser.add_argument('--di', type=int, help='detection interval i.e number", "for v in blob.bounding_box] cv2.rectangle(frame, (x, y), (x + w,", "contextlib.suppress(FileNotFoundError): os.remove(log_file_name) log_file = open(log_file_name, 'a') log_file.write('vehicle_id, count, datetime\\n') log_file.flush()", "\\ [i.e the whole video frame])') parser.add_argument('--showdroi', action='store_true', help='display/overlay the", "'log.txt' with contextlib.suppress(FileNotFoundError): os.remove(log_file_name) log_file = open(log_file_name, 'a') log_file.write('vehicle_id, count,", ">= DETECTION_INTERVAL: # rerun detection droi_frame = get_roi_frame(frame, droi) boxes", "cv2.LINE_AA) # draw counting line cv2.line(frame, counting_line[0], counting_line[1], (0, 255,", "1 # save frame if 's' key is pressed if", "\\ default: 0,0|frame_width,0|frame_width,frame_height|0,frame_height \\ [i.e the whole video frame])') parser.add_argument('--showdroi',", "detection roi on the video') parser.add_argument('--mctf', type=int, help='maximum consecutive tracking", "to find new vehicles \\ and update the trackers of", "# create detection ROI droi = [(0, 0), (f_width, 0),", "delete untracked blobs if blob.num_consecutive_tracking_failures >= MAX_CONSECUTIVE_TRACKING_FAILURES: del blobs[_id] #", "uuid.uuid4().hex 
+ '.png'), frame) print('Screenshot taken.') else: print('End of video.')", "use for vehicle tracking \\ (options: csrt, kcf, camshift |", "or camera input of traffic scene') parser.add_argument('--iscam', action='store_true', help='specify if", "detector = 'yolo' if args.detector == None else args.detector tracker", "str(_id), vehicle_count, datetime.now()) log_file.write(_row) log_file.flush() if frame_counter >= DETECTION_INTERVAL: #", "import get_roi_frame, draw_roi from counter import get_counting_line, is_passed_counting_line # parse", "key is pressed if k & 0xFF == ord('s'): cv2.imwrite(os.path.join('screenshots',", "to a file (vehicle_id, count, datetime) if args.record: _row =", "_id, blob in blobs.items(): (x, y, w, h) = [int(v)", "(x, y - 2), cv2.FONT_HERSHEY_DUPLEX, 1, (0, 255, 0), 2,", "blob.num_consecutive_tracking_failures += 1 # delete untracked blobs if blob.num_consecutive_tracking_failures >=", "that represent the area (polygon) \\ where you want detections", "else: blob.num_consecutive_tracking_failures += 1 # delete untracked blobs if blob.num_consecutive_tracking_failures", "draw and label blob bounding boxes for _id, blob in", "vertices that represent the area (polygon) \\ where you want", "else args.detector tracker = 'kcf' if args.tracker == None else", "int(args.video) if args.iscam else args.video cap = cv2.VideoCapture(video) _, frame", "video.') # end video loop if on the last frame", "args.record: _row = '{0}, {1}, {2}\\n'.format('v_' + str(_id), vehicle_count, datetime.now())", "cv2.waitKey(1) if args.iscam or cap.get(cv2.CAP_PROP_POS_FRAMES) + 1 < cap.get(cv2.CAP_PROP_FRAME_COUNT): _,", "create_blob, add_new_blobs, remove_duplicates import numpy as np from collections import", "again (in order to find new vehicles \\ and update", "datetime\\n') log_file.flush() # set counting line clposition = 'bottom' if", "str(_id), (x, y - 2), cv2.FONT_HERSHEY_DUPLEX, 1, (0, 255, 0),", "= get_roi_frame(frame, droi) boxes = 
get_bounding_boxes(droi_frame, detector) blobs, current_blob_id =", "in blob.bounding_box] cv2.rectangle(frame, (x, y), (x + w, y +", "action='store_true', help='specify if video capture is from a camera') parser.add_argument('--droi',", "# end capture, close window, close log file and video", "& 0xFF == ord('q'): print('Video exited.') break # end capture,", "blob.update(box) else: blob.num_consecutive_tracking_failures += 1 # delete untracked blobs if", "end capture, close window, close log file and video objects", "1,2|3,4|5,6|7,8|9,10 \\ default: 0,0|frame_width,0|frame_width,frame_height|0,frame_height \\ [i.e the whole video frame])')", "area (polygon) \\ where you want detections to be made", "0), 2, cv2.LINE_AA) # draw counting line cv2.line(frame, counting_line[0], counting_line[1],", "of frames \\ before detection is carried out again (in", "counting_line, clposition) and not blob.counted: blob.counted = True vehicle_count +=", "the video') parser.add_argument('--mctf', type=int, help='maximum consecutive tracking failures \\ i.e", "add_new_blobs, remove_duplicates import numpy as np from collections import OrderedDict", "video loop if on the last frame break # end", "counter import get_counting_line, is_passed_counting_line # parse CLI arguments parser =", "= cap.read() nframes = cap.get(cv2.CAP_PROP_POS_FRAMES) frame_count = cap.get(cv2.CAP_PROP_FRAME_COUNT) if nframes", "and log file to record counting if args.record: output_video =", "== 0 or nframes == 1: print(\"Processing {} of {}", "detection droi_frame = get_roi_frame(frame, droi) boxes = get_bounding_boxes(droi_frame, detector) blobs,", "datetime.now()) log_file.write(_row) log_file.flush() if frame_counter >= DETECTION_INTERVAL: # rerun detection", "video frame])') parser.add_argument('--showdroi', action='store_true', help='display/overlay the detection roi on the", "close log file and video objects if any cap.release() if", "contextlib from datetime import datetime import argparse from 
utils.detection_roi import", "= get_roi_frame(frame, droi) initial_bboxes = get_bounding_boxes(droi_frame, detector) for box in", "parser.add_argument('--tracker', help='select a model/algorithm to use for vehicle tracking \\", "frame if 's' key is pressed if k & 0xFF", "print('End of video.') # end video loop if on the", "collections import OrderedDict from detectors.detector import get_bounding_boxes import uuid import", "args.mctf == None else args.mctf detector = 'yolo' if args.detector", "success: blob.num_consecutive_tracking_failures = 0 blob.update(box) else: blob.num_consecutive_tracking_failures += 1 #" ]
[ "from models.user.user import User from models.user.authenticated_user import AuthenticatedUser class MagicCastleAPI(ApiView):", "put(self, user: User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname) json_data = request.get_json()", "not json_data: raise InvalidUsageException(\"No json data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_modification()", "] else: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value,", "type(user) == AuthenticatedUser: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\":", "user.get_all_magic_castles() ] else: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\":", "else: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\":", "{} else: magic_castle = user.create_empty_magic_castle() json_data = request.get_json() if not", "raise InvalidUsageException(\"No json data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_modification() return {}", "magic_castle = user.get_magic_castle_by_hostname(hostname) json_data = request.get_json() if not json_data: raise", "json_data: raise InvalidUsageException(\"No json data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_creation() return", "user.get_all_magic_castles() ] def post(self, user: User, hostname, apply=False): if apply:", "def get(self, user: User, hostname): if hostname: magic_castle = user.get_magic_castle_by_hostname(hostname)", "import User from models.user.authenticated_user import AuthenticatedUser class MagicCastleAPI(ApiView): def get(self,", "{ **magic_castle.dump_configuration(planned_only=True), \"hostname\": 
magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), \"owner\": magic_castle.get_owner_username(),", "in user.get_all_magic_castles() ] else: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(),", "} for magic_castle in user.get_all_magic_castles() ] else: return [ {", "magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.apply() return {} else: magic_castle = user.create_empty_magic_castle()", "magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), \"owner\": magic_castle.get_owner_username(), } for magic_castle", "resources.api_view import ApiView from exceptions.invalid_usage_exception import InvalidUsageException from models.user.user import", "for magic_castle in user.get_all_magic_castles() ] else: return [ { **magic_castle.dump_configuration(planned_only=True),", "user.get_magic_castle_by_hostname(hostname) magic_castle.apply() return {} else: magic_castle = user.create_empty_magic_castle() json_data =", "if apply: magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.apply() return {} else: magic_castle", "\"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), \"owner\": magic_castle.get_owner_username(), } for", "not json_data: raise InvalidUsageException(\"No json data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_creation()", "user: User, hostname, apply=False): if apply: magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.apply()", "User, hostname): if hostname: magic_castle = user.get_magic_castle_by_hostname(hostname) return magic_castle.dump_configuration() else:", "] def post(self, user: User, hostname, apply=False): if apply: magic_castle", "json data was provided\") magic_castle.set_configuration(json_data) 
magic_castle.plan_modification() return {} def delete(self,", "request from resources.api_view import ApiView from exceptions.invalid_usage_exception import InvalidUsageException from", "magic_castle in user.get_all_magic_castles() ] else: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\":", "models.user.authenticated_user import AuthenticatedUser class MagicCastleAPI(ApiView): def get(self, user: User, hostname):", "\"freeipa_passwd\": <PASSWORD>(), \"owner\": magic_castle.get_owner_username(), } for magic_castle in user.get_all_magic_castles() ]", "def post(self, user: User, hostname, apply=False): if apply: magic_castle =", "was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_modification() return {} def delete(self, user: User,", "post(self, user: User, hostname, apply=False): if apply: magic_castle = user.get_magic_castle_by_hostname(hostname)", "else: magic_castle = user.create_empty_magic_castle() json_data = request.get_json() if not json_data:", "ApiView from exceptions.invalid_usage_exception import InvalidUsageException from models.user.user import User from", "models.user.user import User from models.user.authenticated_user import AuthenticatedUser class MagicCastleAPI(ApiView): def", "data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_modification() return {} def delete(self, user:", "{} def delete(self, user: User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.plan_destruction()", "return magic_castle.dump_configuration() else: if type(user) == AuthenticatedUser: return [ {", "user: User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname) json_data = request.get_json() if", "if type(user) == AuthenticatedUser: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(),", "import AuthenticatedUser class MagicCastleAPI(ApiView): def get(self, user: User, hostname): if", 
"provided\") magic_castle.set_configuration(json_data) magic_castle.plan_creation() return {} def put(self, user: User, hostname):", "magic_castle.set_configuration(json_data) magic_castle.plan_modification() return {} def delete(self, user: User, hostname): magic_castle", "return {} def delete(self, user: User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname)", "AuthenticatedUser: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\":", "= user.create_empty_magic_castle() json_data = request.get_json() if not json_data: raise InvalidUsageException(\"No", "\"owner\": magic_castle.get_owner_username(), } for magic_castle in user.get_all_magic_castles() ] else: return", "flask import request from resources.api_view import ApiView from exceptions.invalid_usage_exception import", "\"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), } for magic_castle in", "{} def put(self, user: User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname) json_data", "magic_castle.plan_modification() return {} def delete(self, user: User, hostname): magic_castle =", "= user.get_magic_castle_by_hostname(hostname) return magic_castle.dump_configuration() else: if type(user) == AuthenticatedUser: return", "magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), \"owner\": magic_castle.get_owner_username(), } for magic_castle in user.get_all_magic_castles()", "exceptions.invalid_usage_exception import InvalidUsageException from models.user.user import User from models.user.authenticated_user import", "apply: magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.apply() return {} else: magic_castle =", "**magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, 
\"freeipa_passwd\": <PASSWORD>(), \"owner\": magic_castle.get_owner_username(), }", "magic_castle.apply() return {} else: magic_castle = user.create_empty_magic_castle() json_data = request.get_json()", "if not json_data: raise InvalidUsageException(\"No json data was provided\") magic_castle.set_configuration(json_data)", "request.get_json() if not json_data: raise InvalidUsageException(\"No json data was provided\")", "<PASSWORD>(), \"owner\": magic_castle.get_owner_username(), } for magic_castle in user.get_all_magic_castles() ] else:", "return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(),", "return {} else: magic_castle = user.create_empty_magic_castle() json_data = request.get_json() if", "magic_castle.get_owner_username(), } for magic_castle in user.get_all_magic_castles() ] else: return [", "magic_castle = user.get_magic_castle_by_hostname(hostname) return magic_castle.dump_configuration() else: if type(user) == AuthenticatedUser:", "from resources.api_view import ApiView from exceptions.invalid_usage_exception import InvalidUsageException from models.user.user", "InvalidUsageException(\"No json data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_creation() return {} def", "} for magic_castle in user.get_all_magic_castles() ] def post(self, user: User,", "= user.get_magic_castle_by_hostname(hostname) json_data = request.get_json() if not json_data: raise InvalidUsageException(\"No", "magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), } for magic_castle in user.get_all_magic_castles() ] def", "AuthenticatedUser class MagicCastleAPI(ApiView): def get(self, user: User, hostname): if hostname:", "= request.get_json() if not json_data: raise InvalidUsageException(\"No json data was", "<filename>app/resources/magic_castle_api.py from flask import request from resources.api_view import 
ApiView from", "if hostname: magic_castle = user.get_magic_castle_by_hostname(hostname) return magic_castle.dump_configuration() else: if type(user)", "json_data: raise InvalidUsageException(\"No json data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_modification() return", "from flask import request from resources.api_view import ApiView from exceptions.invalid_usage_exception", "user.get_magic_castle_by_hostname(hostname) return magic_castle.dump_configuration() else: if type(user) == AuthenticatedUser: return [", "user: User, hostname): if hostname: magic_castle = user.get_magic_castle_by_hostname(hostname) return magic_castle.dump_configuration()", "raise InvalidUsageException(\"No json data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_creation() return {}", "import request from resources.api_view import ApiView from exceptions.invalid_usage_exception import InvalidUsageException", "{ **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), } for", "in user.get_all_magic_castles() ] def post(self, user: User, hostname, apply=False): if", "\"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), } for magic_castle in user.get_all_magic_castles() ]", "\"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), \"owner\": magic_castle.get_owner_username(), } for magic_castle in", "was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_creation() return {} def put(self, user: User,", "import InvalidUsageException from models.user.user import User from models.user.authenticated_user import AuthenticatedUser", "User, hostname, apply=False): if apply: magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.apply() return", "hostname): magic_castle = user.get_magic_castle_by_hostname(hostname) json_data = 
request.get_json() if not json_data:", "apply=False): if apply: magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.apply() return {} else:", "json data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_creation() return {} def put(self,", "return {} def put(self, user: User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname)", "from exceptions.invalid_usage_exception import InvalidUsageException from models.user.user import User from models.user.authenticated_user", "**magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), } for magic_castle", "magic_castle in user.get_all_magic_castles() ] def post(self, user: User, hostname, apply=False):", "class MagicCastleAPI(ApiView): def get(self, user: User, hostname): if hostname: magic_castle", "user.create_empty_magic_castle() json_data = request.get_json() if not json_data: raise InvalidUsageException(\"No json", "magic_castle.plan_creation() return {} def put(self, user: User, hostname): magic_castle =", "get(self, user: User, hostname): if hostname: magic_castle = user.get_magic_castle_by_hostname(hostname) return", "json_data = request.get_json() if not json_data: raise InvalidUsageException(\"No json data", "magic_castle.dump_configuration() else: if type(user) == AuthenticatedUser: return [ { **magic_castle.dump_configuration(planned_only=True),", "hostname): if hostname: magic_castle = user.get_magic_castle_by_hostname(hostname) return magic_castle.dump_configuration() else: if", "magic_castle = user.create_empty_magic_castle() json_data = request.get_json() if not json_data: raise", "def delete(self, user: User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.plan_destruction() return", "from models.user.authenticated_user import AuthenticatedUser class MagicCastleAPI(ApiView): def get(self, user: 
User,", "else: if type(user) == AuthenticatedUser: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\":", "user.get_magic_castle_by_hostname(hostname) json_data = request.get_json() if not json_data: raise InvalidUsageException(\"No json", "[ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), \"owner\":", "delete(self, user: User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.plan_destruction() return {}", "hostname: magic_castle = user.get_magic_castle_by_hostname(hostname) return magic_castle.dump_configuration() else: if type(user) ==", "<PASSWORD>(), } for magic_castle in user.get_all_magic_castles() ] def post(self, user:", "User from models.user.authenticated_user import AuthenticatedUser class MagicCastleAPI(ApiView): def get(self, user:", "provided\") magic_castle.set_configuration(json_data) magic_castle.plan_modification() return {} def delete(self, user: User, hostname):", "[ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), }", "== AuthenticatedUser: return [ { **magic_castle.dump_configuration(planned_only=True), \"hostname\": magic_castle.get_hostname(), \"status\": magic_castle.get_status().value,", "magic_castle.get_hostname(), \"status\": magic_castle.get_status().value, \"freeipa_passwd\": <PASSWORD>(), } for magic_castle in user.get_all_magic_castles()", "def put(self, user: User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname) json_data =", "= user.get_magic_castle_by_hostname(hostname) magic_castle.apply() return {} else: magic_castle = user.create_empty_magic_castle() json_data", "InvalidUsageException(\"No json data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_modification() 
return {} def", "import ApiView from exceptions.invalid_usage_exception import InvalidUsageException from models.user.user import User", "InvalidUsageException from models.user.user import User from models.user.authenticated_user import AuthenticatedUser class", "MagicCastleAPI(ApiView): def get(self, user: User, hostname): if hostname: magic_castle =", "for magic_castle in user.get_all_magic_castles() ] def post(self, user: User, hostname,", "magic_castle.set_configuration(json_data) magic_castle.plan_creation() return {} def put(self, user: User, hostname): magic_castle", "hostname, apply=False): if apply: magic_castle = user.get_magic_castle_by_hostname(hostname) magic_castle.apply() return {}", "\"freeipa_passwd\": <PASSWORD>(), } for magic_castle in user.get_all_magic_castles() ] def post(self,", "User, hostname): magic_castle = user.get_magic_castle_by_hostname(hostname) json_data = request.get_json() if not", "data was provided\") magic_castle.set_configuration(json_data) magic_castle.plan_creation() return {} def put(self, user:" ]
[ "role[\"id\"] assert proposal[\"target\"] == user[\"user_id\"] assert proposal[\"opener\"] == user[\"user_id\"] @pytest.mark.api", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "requests.post( url=url, headers={\"Authorization\": user[\"token\"]}, json=data ) result = assert_api_success(response) assert", "url=url, headers={\"Authorization\": user[\"token\"]}, json=data ) result = assert_api_success(response) assert result[\"proposal_id\"]", "== user[\"user_id\"] assert proposal[\"opener\"] == user[\"user_id\"] @pytest.mark.api @pytest.mark.api_role def test_api_propose_role_member_required_fields():", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "from tests.rbac.api.assertions import assert_api_post_requires_auth LOGGER = get_logger(__name__) @pytest.mark.api @pytest.mark.api_role def", "json=data) response = requests.post( url=url, headers={\"Authorization\": user[\"token\"]}, json=data ) result", "proposal proposal = helper.api.proposal.get(result[\"proposal_id\"], owner) assert proposal[\"id\"] == result[\"proposal_id\"] assert", "import get_logger from tests.rbac import helper from tests.rbac.api.assertions import assert_api_error", "distributed under the License is distributed on an \"AS IS\"", "assert_api_success(response) assert result[\"proposal_id\"] time.sleep(0.5) # temporary until API refactored to", "from tests.rbac.api.assertions import assert_api_success from tests.rbac.api.assertions import assert_api_post_requires_auth LOGGER =", "result[\"proposal_id\"] time.sleep(0.5) # temporary 
until API refactored to return the", "= helper.api.user.create.current url = helper.api.role.member.propose.url(role_id=role[\"id\"]) data = {} response =", "the License. # ----------------------------------------------------------------------------- \"\"\" Propose Role Add Member Test", "test_api_propose_role_member(): \"\"\" Test a user proposing to add themselves to", "permissions and # limitations under the License. # ----------------------------------------------------------------------------- \"\"\"", "Test a user proposing to add themselves to a role", "data = {\"id\": user[\"user_id\"]} assert assert_api_post_requires_auth(url=url, json=data) response = requests.post(", "the specific language governing permissions and # limitations under the", "role = helper.api.role.create.new(user=owner) user = helper.api.user.current2 url = helper.api.role.member.propose.url(role_id=role[\"id\"]) data", "user[\"user_id\"] @pytest.mark.api @pytest.mark.api_role def test_api_propose_role_member_required_fields(): \"\"\" Test proposing adding a", "Test proposing adding a member to a role with missing", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "response = requests.post( url=url, headers={\"Authorization\": user[\"token\"]}, json=data ) assert_api_error(response, \"Bad", "to add themselves to a role \"\"\" owner = helper.api.user.current", "member to a role with missing fields \"\"\" role, _", "to a role with missing fields \"\"\" role, _ =", "assert proposal[\"type\"] == \"ADD_ROLE_MEMBER\" assert proposal[\"object\"] == role[\"id\"] assert proposal[\"target\"]", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "except in compliance with the License. 
# You may obtain", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "tests.rbac import helper from tests.rbac.api.assertions import assert_api_error from tests.rbac.api.assertions import", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "proposal[\"target\"] == user[\"user_id\"] assert proposal[\"opener\"] == user[\"user_id\"] @pytest.mark.api @pytest.mark.api_role def", "@pytest.mark.api_role def test_api_propose_role_member_required_fields(): \"\"\" Test proposing adding a member to", "LOGGER = get_logger(__name__) @pytest.mark.api @pytest.mark.api_role def test_api_propose_role_member(): \"\"\" Test a", "proposal[\"type\"] == \"ADD_ROLE_MEMBER\" assert proposal[\"object\"] == role[\"id\"] assert proposal[\"target\"] ==", "assert result[\"proposal_id\"] time.sleep(0.5) # temporary until API refactored to return", "# ----------------------------------------------------------------------------- \"\"\" Propose Role Add Member Test \"\"\" #", "writing, software # distributed under the License is distributed on", "import requests import pytest from rbac.common.logs import get_logger from tests.rbac", "in writing, software # distributed under the License is distributed", "requests.post( url=url, headers={\"Authorization\": user[\"token\"]}, json=data ) assert_api_error(response, \"Bad Request: id", "you may not use this file except in compliance with", "with missing fields \"\"\" role, _ = helper.api.role.current user =", "@pytest.mark.api_role def test_api_propose_role_member(): \"\"\" Test a user proposing to add", "assert_api_success from tests.rbac.api.assertions import assert_api_post_requires_auth LOGGER = get_logger(__name__) @pytest.mark.api @pytest.mark.api_role", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "user[\"token\"]}, json=data ) result = assert_api_success(response) assert result[\"proposal_id\"] time.sleep(0.5) 
#", "test_api_propose_role_member_required_fields(): \"\"\" Test proposing adding a member to a role", "helper.api.user.create.current url = helper.api.role.member.propose.url(role_id=role[\"id\"]) data = {} response = requests.post(", "\"\"\" owner = helper.api.user.current role = helper.api.role.create.new(user=owner) user = helper.api.user.current2", "disable=invalid-name import time import requests import pytest from rbac.common.logs import", "url=url, headers={\"Authorization\": user[\"token\"]}, json=data ) assert_api_error(response, \"Bad Request: id field", "_ = helper.api.role.current user = helper.api.user.create.current url = helper.api.role.member.propose.url(role_id=role[\"id\"]) data", "proposal = helper.api.proposal.get(result[\"proposal_id\"], owner) assert proposal[\"id\"] == result[\"proposal_id\"] assert proposal[\"status\"]", "headers={\"Authorization\": user[\"token\"]}, json=data ) assert_api_error(response, \"Bad Request: id field is", "to Hyperledger Sawtooth # # Licensed under the Apache License,", "use this file except in compliance with the License. 
#", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "assert_api_error from tests.rbac.api.assertions import assert_api_success from tests.rbac.api.assertions import assert_api_post_requires_auth LOGGER", "= get_logger(__name__) @pytest.mark.api @pytest.mark.api_role def test_api_propose_role_member(): \"\"\" Test a user", "= helper.api.role.current user = helper.api.user.create.current url = helper.api.role.member.propose.url(role_id=role[\"id\"]) data =", "# pylint: disable=invalid-name import time import requests import pytest from", "import time import requests import pytest from rbac.common.logs import get_logger", "json=data ) result = assert_api_success(response) assert result[\"proposal_id\"] time.sleep(0.5) # temporary", "add themselves to a role \"\"\" owner = helper.api.user.current role", "tests.rbac.api.assertions import assert_api_error from tests.rbac.api.assertions import assert_api_success from tests.rbac.api.assertions import", "CONDITIONS OF ANY KIND, either express or implied. # See", "== \"OPEN\" assert proposal[\"type\"] == \"ADD_ROLE_MEMBER\" assert proposal[\"object\"] == role[\"id\"]", "role, _ = helper.api.role.current user = helper.api.user.create.current url = helper.api.role.member.propose.url(role_id=role[\"id\"])", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "rbac.common.logs import get_logger from tests.rbac import helper from tests.rbac.api.assertions import", "result[\"proposal_id\"] assert proposal[\"status\"] == \"OPEN\" assert proposal[\"type\"] == \"ADD_ROLE_MEMBER\" assert", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License. 
# You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "response = requests.post( url=url, headers={\"Authorization\": user[\"token\"]}, json=data ) result =", "and # limitations under the License. # ----------------------------------------------------------------------------- \"\"\" Propose", "until API refactored to return the proposal proposal = helper.api.proposal.get(result[\"proposal_id\"],", "# You may obtain a copy of the License at", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "\"\"\" # pylint: disable=invalid-name import time import requests import pytest", "== role[\"id\"] assert proposal[\"target\"] == user[\"user_id\"] assert proposal[\"opener\"] == user[\"user_id\"]", "= helper.api.role.member.propose.url(role_id=role[\"id\"]) data = {\"id\": user[\"user_id\"]} assert assert_api_post_requires_auth(url=url, json=data) response", "Test \"\"\" # pylint: disable=invalid-name import time import requests import", "a role with missing fields \"\"\" role, _ = helper.api.role.current", "under the License is distributed on an \"AS IS\" BASIS,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "Propose Role Add Member Test \"\"\" # pylint: disable=invalid-name import", "Role Add Member Test \"\"\" # pylint: disable=invalid-name import time", "License for the specific language governing permissions and # limitations", "from rbac.common.logs import get_logger from tests.rbac import helper from tests.rbac.api.assertions", "assert proposal[\"object\"] == role[\"id\"] assert proposal[\"target\"] == user[\"user_id\"] assert proposal[\"opener\"]", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "user = helper.api.user.current2 url = helper.api.role.member.propose.url(role_id=role[\"id\"]) data = {\"id\": 
user[\"user_id\"]}", "assert proposal[\"target\"] == user[\"user_id\"] assert proposal[\"opener\"] == user[\"user_id\"] @pytest.mark.api @pytest.mark.api_role", "== \"ADD_ROLE_MEMBER\" assert proposal[\"object\"] == role[\"id\"] assert proposal[\"target\"] == user[\"user_id\"]", "pylint: disable=invalid-name import time import requests import pytest from rbac.common.logs", "import assert_api_post_requires_auth LOGGER = get_logger(__name__) @pytest.mark.api @pytest.mark.api_role def test_api_propose_role_member(): \"\"\"", "user[\"user_id\"]} assert assert_api_post_requires_auth(url=url, json=data) response = requests.post( url=url, headers={\"Authorization\": user[\"token\"]},", "# temporary until API refactored to return the proposal proposal", "2018 Contributors to Hyperledger Sawtooth # # Licensed under the", "time.sleep(0.5) # temporary until API refactored to return the proposal", "role \"\"\" owner = helper.api.user.current role = helper.api.role.create.new(user=owner) user =", "the License for the specific language governing permissions and #", "from tests.rbac import helper from tests.rbac.api.assertions import assert_api_error from tests.rbac.api.assertions", "(the \"License\"); # you may not use this file except", "= assert_api_success(response) assert result[\"proposal_id\"] time.sleep(0.5) # temporary until API refactored", "helper.api.role.current user = helper.api.user.create.current url = helper.api.role.member.propose.url(role_id=role[\"id\"]) data = {}", "Apache License, Version 2.0 (the \"License\"); # you may not", "owner = helper.api.user.current role = helper.api.role.create.new(user=owner) user = helper.api.user.current2 url", "# you may not use this file except in compliance", "the proposal proposal = helper.api.proposal.get(result[\"proposal_id\"], owner) assert proposal[\"id\"] == result[\"proposal_id\"]", "either express or implied. 
# See the License for the", "to a role \"\"\" owner = helper.api.user.current role = helper.api.role.create.new(user=owner)", "Sawtooth # # Licensed under the Apache License, Version 2.0", "return the proposal proposal = helper.api.proposal.get(result[\"proposal_id\"], owner) assert proposal[\"id\"] ==", "== result[\"proposal_id\"] assert proposal[\"status\"] == \"OPEN\" assert proposal[\"type\"] == \"ADD_ROLE_MEMBER\"", "OR CONDITIONS OF ANY KIND, either express or implied. #", "owner) assert proposal[\"id\"] == result[\"proposal_id\"] assert proposal[\"status\"] == \"OPEN\" assert", "@pytest.mark.api @pytest.mark.api_role def test_api_propose_role_member(): \"\"\" Test a user proposing to", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "= helper.api.role.create.new(user=owner) user = helper.api.user.current2 url = helper.api.role.member.propose.url(role_id=role[\"id\"]) data =", "proposal[\"opener\"] == user[\"user_id\"] @pytest.mark.api @pytest.mark.api_role def test_api_propose_role_member_required_fields(): \"\"\" Test proposing", "the License is distributed on an \"AS IS\" BASIS, #", "role with missing fields \"\"\" role, _ = helper.api.role.current user", "time import requests import pytest from rbac.common.logs import get_logger from", "import helper from tests.rbac.api.assertions import assert_api_error from tests.rbac.api.assertions import assert_api_success", "in compliance with the License. # You may obtain a", "\"\"\" Propose Role Add Member Test \"\"\" # pylint: disable=invalid-name", "License. 
# ----------------------------------------------------------------------------- \"\"\" Propose Role Add Member Test \"\"\"", "software # distributed under the License is distributed on an", "assert proposal[\"opener\"] == user[\"user_id\"] @pytest.mark.api @pytest.mark.api_role def test_api_propose_role_member_required_fields(): \"\"\" Test", "import assert_api_error from tests.rbac.api.assertions import assert_api_success from tests.rbac.api.assertions import assert_api_post_requires_auth", "user = helper.api.user.create.current url = helper.api.role.member.propose.url(role_id=role[\"id\"]) data = {} response", "# Copyright 2018 Contributors to Hyperledger Sawtooth # # Licensed", "----------------------------------------------------------------------------- \"\"\" Propose Role Add Member Test \"\"\" # pylint:", ") result = assert_api_success(response) assert result[\"proposal_id\"] time.sleep(0.5) # temporary until", "\"\"\" role, _ = helper.api.role.current user = helper.api.user.create.current url =", "a role \"\"\" owner = helper.api.user.current role = helper.api.role.create.new(user=owner) user", "# # Unless required by applicable law or agreed to", "user[\"token\"]}, json=data ) assert_api_error(response, \"Bad Request: id field is required\",", "themselves to a role \"\"\" owner = helper.api.user.current role =", "def test_api_propose_role_member_required_fields(): \"\"\" Test proposing adding a member to a", "Contributors to Hyperledger Sawtooth # # Licensed under the Apache", "\"\"\" Test proposing adding a member to a role with", "get_logger(__name__) @pytest.mark.api @pytest.mark.api_role def test_api_propose_role_member(): \"\"\" Test a user proposing", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "governing permissions and # limitations under the License. 
@pytest.mark.api
@pytest.mark.api_role
def test_api_propose_role_member():
    """ Test a user proposing to add themselves to a role """
    owner = helper.api.user.current
    role = helper.api.role.create.new(user=owner)
    applicant = helper.api.user.current2
    propose_url = helper.api.role.member.propose.url(role_id=role["id"])
    payload = {"id": applicant["user_id"]}
    # The endpoint must reject unauthenticated requests before anything else.
    assert assert_api_post_requires_auth(url=propose_url, json=payload)
    response = requests.post(
        url=propose_url, headers={"Authorization": applicant["token"]}, json=payload
    )
    result = assert_api_success(response)
    assert result["proposal_id"]
    # temporary until API refactored to return the proposal
    time.sleep(0.5)
    proposal = helper.api.proposal.get(result["proposal_id"], owner)
    # The stored proposal must reference the role, the applicant, and stay open.
    assert proposal["id"] == result["proposal_id"]
    assert proposal["status"] == "OPEN"
    assert proposal["type"] == "ADD_ROLE_MEMBER"
    assert proposal["object"] == role["id"]
    assert proposal["target"] == applicant["user_id"]
    assert proposal["opener"] == applicant["user_id"]
@pytest.mark.api
@pytest.mark.api_role
def test_api_propose_role_member_required_fields():
    """ Test proposing adding a member to a role with missing fields """
    role, _ = helper.api.role.current
    requester = helper.api.user.create.current
    propose_url = helper.api.role.member.propose.url(role_id=role["id"])
    # Post an empty body: the required "id" field is deliberately omitted.
    empty_payload = {}
    response = requests.post(
        url=propose_url,
        headers={"Authorization": requester["token"]},
        json=empty_payload,
    )
    assert_api_error(response, "Bad Request: id field is required", 400)
def frame2video(im_dir, video_dir, fps):
    """Stitch every frame image in ``im_dir`` into one video at ``video_dir``.

    Parameters:
        im_dir: directory containing the frame images. All images must share
            one resolution; the size of the first (sorted) frame defines the
            video resolution.
        video_dir: output video file path (XVID-encoded).
        fps: playback frame rate of the resulting video.
    """
    im_list = os.listdir(im_dir)
    # Sort numerically by the index embedded in the file name
    # (e.g. "12_RBPNF7.png" -> 12); a plain string sort would order
    # "10" before "2".
    im_list.sort(key=lambda x: int(x.replace("_RBPNF7", "").split('.')[0]))
    img = Image.open(os.path.join(im_dir, im_list[0]))
    # PIL gives (width, height), which is what VideoWriter expects.
    img_size = img.size
    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    videoWriter = cv2.VideoWriter(video_dir, fourcc, fps, img_size)
    try:
        for name in im_list:
            # BUG FIX: the original called os.path.join(im_dir + name), which
            # only worked when im_dir ended with a path separator; pass the
            # two components as separate arguments instead.
            im_name = os.path.join(im_dir, name)
            # Decode via np.fromfile + cv2.imdecode so paths with non-ASCII
            # characters still load (cv2.imread can fail on those on Windows).
            frame = cv2.imdecode(np.fromfile(im_name, dtype=np.uint8), -1)
            videoWriter.write(frame)
    finally:
        # Always finalize the container, even if a frame fails to decode.
        videoWriter.release()
if __name__ == '__main__':
    im_dir = '/media/hy/Seagate Expansion Drive/Results/merge_dir/'  # directory holding the input frames
    video_dir = '/media/hy/Seagate Expansion Drive/Results/sandy.mp4'  # path of the assembled output video
    fps = 15  # frame rate of the output video
    frame2video(im_dir, video_dir, fps)